From a48bdeeb09af1ac26b7cd13fa91baa72c7fa6f94 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Wed, 1 Apr 2026 17:43:06 +0000 Subject: [PATCH 1/3] Initial plan From c2c40044d0fc10bbde5c6702384fb8221c0b7a93 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Wed, 1 Apr 2026 18:31:32 +0000 Subject: [PATCH 2/3] Transform test files to async/await for Database API migration - Add await to all async DB methods: createNode, getNode, updateNode, deleteNode, createEdge, getEdge, deleteEdge, export, import, close, mergeNode, mergeEdge, createPropertyIndex, listIndexes, dropIndex - Make transaction callbacks async with await on inner calls - Await NodeQuery terminal methods: exec(), first(), count(), exists() - Await TraversalQuery terminal methods: toArray(), toPaths(), etc. - Make beforeEach/afterEach/it() callbacks async where needed - Transform expect(() => asyncMethod()).toThrow() to rejects.toThrow() - Handle Array.from/map with async callbacks using Promise.all - Convert forEach(async ...) 
to await Promise.all(arr.map(async ...)) - PatternQuery.exec/first/count remain synchronous (no await) - traverse() sync validation unchanged Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> Co-authored-by: michaeloboyle <61171+michaeloboyle@users.noreply.github.com> --- scripts/transform_tests_v2.py | 822 ++++++++++++++++++++ tests/integration/graph-operations.test.ts | 428 +++++----- tests/integration/job-pipeline.test.ts | 340 ++++---- tests/unit/Database-merge.test.ts | 244 +++--- tests/unit/Database.test.ts | 516 ++++++------ tests/unit/NodeQuery-both-direction.test.ts | 152 ++-- tests/unit/NodeQuery.test.ts | 394 +++++----- tests/unit/PatternQuery.test.ts | 94 +-- tests/unit/Transaction.test.ts | 192 +++-- tests/unit/TraversalQuery-paths.test.ts | 96 +-- tests/unit/TraversalQuery.test.ts | 368 ++++----- tests/unit/concurrency.test.ts | 56 +- 12 files changed, 2238 insertions(+), 1464 deletions(-) create mode 100644 scripts/transform_tests_v2.py diff --git a/scripts/transform_tests_v2.py b/scripts/transform_tests_v2.py new file mode 100644 index 0000000..2fdabd4 --- /dev/null +++ b/scripts/transform_tests_v2.py @@ -0,0 +1,822 @@ +#!/usr/bin/env python3 +""" +Transform test files from sync to async/await for Database API migration. 
+""" +import re +import sys +import os + +# Methods on db/testDb objects that are now async +ASYNC_DB_METHODS = [ + 'createNode', 'getNode', 'updateNode', 'deleteNode', + 'createEdge', 'getEdge', 'deleteEdge', + 'export', 'import', 'close', + 'mergeNode', 'mergeEdge', + 'createPropertyIndex', 'listIndexes', 'dropIndex', +] + +# NodeQuery terminal methods (now async) +NODEQUERY_EXEC = ['exec', 'first', 'count', 'exists'] + +# TraversalQuery terminal methods (now async) +TRAVERSAL_EXEC = ['toArray', 'toPaths', 'shortestPath', 'paths', 'allPaths'] + + +def find_matching_brace(s, start): + """Find matching } for s[start] == '{'""" + depth = 0 + i = start + in_str = None + esc = False + while i < len(s): + c = s[i] + if esc: + esc = False + elif c == '\\' and in_str: + esc = True + elif in_str: + if c == in_str: + in_str = None + elif c == '/' and i + 1 < len(s) and s[i+1] == '/': + # Line comment - skip to end of line + while i < len(s) and s[i] != '\n': + i += 1 + continue + elif c == '/' and i + 1 < len(s) and s[i+1] == '*': + # Block comment - skip to */ + i += 2 + while i + 1 < len(s) and not (s[i] == '*' and s[i+1] == '/'): + i += 1 + i += 2 + continue + elif c in ('"', "'", '`'): + in_str = c + elif c == '{': + depth += 1 + elif c == '}': + depth -= 1 + if depth == 0: + return i + i += 1 + return -1 + + +def find_matching_paren(s, start): + """Find matching ) for s[start] == '('""" + depth = 0 + i = start + in_str = None + esc = False + while i < len(s): + c = s[i] + if esc: + esc = False + elif c == '\\' and in_str: + esc = True + elif in_str: + if c == in_str: + in_str = None + elif c == '/' and i + 1 < len(s) and s[i+1] == '/': + # Line comment - skip to end of line + while i < len(s) and s[i] != '\n': + i += 1 + continue + elif c == '/' and i + 1 < len(s) and s[i+1] == '*': + # Block comment - skip to */ + i += 2 + while i + 1 < len(s) and not (s[i] == '*' and s[i+1] == '/'): + i += 1 + i += 2 + continue + elif c in ('"', "'", '`'): + in_str = c + elif c == 
'(': + depth += 1 + elif c == ')': + depth -= 1 + if depth == 0: + return i + i += 1 + return -1 + + +def needs_await_before(s, pos): + """Check if position pos in string s already has await before it.""" + # Look back up to 15 chars for 'await' + pre = s[max(0, pos - 15):pos] + return bool(re.search(r'\bawait\s*$', pre)) + + +def is_inside_expect_call(content, pos): + """Check if position is directly inside expect(...) as the direct argument (not inside a nested arrow fn).""" + pre = content[max(0, pos - 80):pos] + # Check if preceded by expect( (direct arg) or expect(() => (lambda wrapper) + return bool(re.search(r'expect\s*\(\s*$', pre)) or bool(re.search(r'expect\s*\(\s*\(\s*\)\s*=>\s*$', pre)) + + +def transform_db_methods(content): + """Add await before db.METHOD() and testDb.METHOD() and ctx calls.""" + # Object names that can have these methods + obj_re = r'(?:db|testDb|newDb|schemaDb|emptyDb|ctx|db2|db3)' + + for method in ASYNC_DB_METHODS: + pattern = re.compile(r'\b(' + obj_re + r')\.' 
+ re.escape(method) + r'\s*\(') + result = [] + i = 0 + while i < len(content): + m = pattern.search(content, i) + if not m: + result.append(content[i:]) + break + if needs_await_before(content, m.start()) or is_inside_expect_call(content, m.start()): + result.append(content[i:m.end()]) + i = m.end() + continue + # Find the end of the method call + paren_start = m.end() - 1 + paren_end = find_matching_paren(content, paren_start) + if paren_end == -1: + result.append(content[i:m.start()]) + result.append('await ' + m.group(0)) + i = m.end() + continue + # Check if followed by property access (but not method call) + after = content[paren_end+1:paren_end+3] + if re.match(r'\.\w', after) and not re.match(r'\.\w+\s*\(', content[paren_end+1:paren_end+30]): + # Wrap in parens: (await db.METHOD(...)).property + call_text = content[m.start():paren_end+1] + result.append(content[i:m.start()]) + result.append('(await ' + call_text + ')') + i = paren_end + 1 + else: + result.append(content[i:m.start()]) + result.append('await ' + m.group(0)) + i = m.end() + content = ''.join(result) + + return content + + +def make_transaction_callback_async(inner): + """Make transaction callback async: (ctx) => { -> async (ctx) => {""" + cb_pattern = re.compile(r'^(\s*)(\([^)]*\))(\s*=>\s*)(\{)') + cb_match = cb_pattern.match(inner) + if cb_match and 'async' not in inner[:cb_match.end()]: + return (inner[:cb_match.start(2)] + + 'async ' + cb_match.group(2) + + cb_match.group(3) + cb_match.group(4) + + inner[cb_match.end():]) + return inner + + +def transform_transaction(content): + """Transform db.transaction((ctx) => { to await db.transaction(async (ctx) => {""" + result = [] + i = 0 + # Match db.transaction( with optional TypeScript generics like + tx_pattern = re.compile(r'\bdb\.transaction\s*(?:<[^>]*>)?\s*\(') + + while i < len(content): + m = tx_pattern.search(content, i) + if not m: + result.append(content[i:]) + break + + if needs_await_before(content, m.start()): + # Already has 
await - but still need to make callback async + paren_start = m.end() - 1 + paren_end = find_matching_paren(content, paren_start) + if paren_end != -1: + inner = content[m.end():paren_end] + inner_new = make_transaction_callback_async(inner) + result.append(content[i:m.end()]) + result.append(inner_new + content[paren_end]) + i = paren_end + 1 + else: + result.append(content[i:m.end()]) + i = m.end() + continue + + # Check if this is inside expect(...) - don't add await but still make callback async + pre = content[max(0, m.start()-60):m.start()] + inside_expect = bool(re.search(r'expect\s*\(\s*(?:\(\s*\)\s*=>\s*)?$', pre)) + + # Find the opening paren position + paren_start = m.end() - 1 + paren_end = find_matching_paren(content, paren_start) + + if paren_end == -1: + result.append(content[i:m.end()]) + i = m.end() + continue + + inner = content[m.end():paren_end] + inner_new = make_transaction_callback_async(inner) + + if inside_expect: + result.append(content[i:m.start()]) + result.append('db.transaction(' + inner_new + content[paren_end]) + else: + result.append(content[i:m.start()]) + result.append('await db.transaction(' + inner_new + content[paren_end]) + i = paren_end + 1 + + return ''.join(result) + + +def transform_nodequery_exec(content, is_pattern_query=False): + """Add await before NodeQuery terminal method calls.""" + if is_pattern_query: + # In PatternQuery test, db.pattern()...exec() stays sync + # Only db.nodes()...exec() needs await (but PatternQuery test doesn't have these) + return content + + # Pattern: IDENTIFIER.nodes( ... chain ... ).exec() + # Handles db.nodes(), db2.nodes(), emptyDb.nodes(), etc. 
+ exec_methods = set(NODEQUERY_EXEC) + + result = [] + i = 0 + nodes_pattern = re.compile(r'\b(\w+)\.nodes\s*\(') + + while i < len(content): + m = nodes_pattern.search(content, i) + if not m: + result.append(content[i:]) + break + + if needs_await_before(content, m.start()): + # Already has await, just scan past + # But we still need to find the end of this chain + result.append(content[i:m.end()]) + i = m.end() + continue + + # Find the full chain + chain_start = m.start() + j = m.end() - 1 # position of '(' + paren_end = find_matching_paren(content, j) + if paren_end == -1: + result.append(content[i:m.end()]) + i = m.end() + continue + + # Now follow the method chain + k = paren_end + 1 + last_exec_end = -1 + + while k < len(content): + # Skip whitespace/newlines + ws = re.match(r'[ \t\n]*', content[k:]) + k += len(ws.group(0)) if ws else 0 + + if k >= len(content) or content[k] != '.': + break + + # Try to match method call + mm = re.match(r'\.(\w+)\s*\(', content[k:]) + if not mm: + break + + method_name = mm.group(1) + call_paren_pos = k + mm.end() - 1 + call_paren_end = find_matching_paren(content, call_paren_pos) + + if call_paren_end == -1: + break + + if method_name in NODEQUERY_EXEC: + last_exec_end = call_paren_end + + k = call_paren_end + 1 + + if last_exec_end != -1: + result.append(content[i:chain_start]) + result.append('await ' + content[chain_start:last_exec_end + 1]) + i = last_exec_end + 1 + else: + result.append(content[i:m.end()]) + i = m.end() + + final_content = ''.join(result) + + # Second pass: handle standalone variable.exec/first/count/exists() calls + # e.g., const results = query.exec() where query = db.nodes(...) + for method in NODEQUERY_EXEC: + second_result = [] + j = 0 + method_pattern = re.compile(r'\b(\w+)\.' 
+ method + r'\s*\(\s*\)') + while j < len(final_content): + mm = method_pattern.search(final_content, j) + if not mm: + second_result.append(final_content[j:]) + break + if needs_await_before(final_content, mm.start()): + second_result.append(final_content[j:mm.end()]) + j = mm.end() + continue + var_name = mm.group(1) + # Skip known db chain starters (handled by first pass) and system objects + skip_vars = {'process', 'fs', 'path', 'require', 'console', 'JSON', + 'Object', 'Array', 'Math', 'db', 'db2', 'testDb', 'newDb', + 'schemaDb', 'emptyDb', 'typeof'} + if var_name in skip_vars: + second_result.append(final_content[j:mm.end()]) + j = mm.end() + continue + second_result.append(final_content[j:mm.start()]) + second_result.append('await ' + mm.group(0)) + j = mm.end() + final_content = ''.join(second_result) + + return final_content + + +def transform_traversal_exec(content): + """Add await before TraversalQuery terminal method chains.""" + exec_methods = set(TRAVERSAL_EXEC) + + result = [] + i = 0 + traverse_pattern = re.compile(r'\bdb\.traverse\s*\(') + + while i < len(content): + m = traverse_pattern.search(content, i) + if not m: + result.append(content[i:]) + break + + if needs_await_before(content, m.start()): + result.append(content[i:m.end()]) + i = m.end() + continue + + # Check if in expect(() => db.traverse(...)) - sync validation, skip + pre = content[max(0, m.start()-60):m.start()] + if re.search(r'expect\s*\(\s*\(\s*\)\s*=>\s*$', pre): + result.append(content[i:m.end()]) + i = m.end() + continue + + chain_start = m.start() + j = m.end() - 1 + paren_end = find_matching_paren(content, j) + if paren_end == -1: + result.append(content[i:m.end()]) + i = m.end() + continue + + k = paren_end + 1 + last_exec_end = -1 + + while k < len(content): + ws = re.match(r'[ \t\n]*', content[k:]) + k += len(ws.group(0)) if ws else 0 + + if k >= len(content) or content[k] != '.': + break + + mm = re.match(r'\.(\w+)\s*\(', content[k:]) + if not mm: + break + + 
method_name = mm.group(1) + call_paren_pos = k + mm.end() - 1 + call_paren_end = find_matching_paren(content, call_paren_pos) + + if call_paren_end == -1: + break + + if method_name in exec_methods: + last_exec_end = call_paren_end + + k = call_paren_end + 1 + + if last_exec_end != -1: + # Check if there are more method calls after the terminal method + # (e.g., .toPaths().filter(...)) - if so, wrap in parens + after_exec = content[last_exec_end+1:last_exec_end+3] + if re.match(r'\.\w', after_exec): + result.append(content[i:chain_start]) + result.append('(await ' + content[chain_start:last_exec_end + 1] + ')') + else: + result.append(content[i:chain_start]) + result.append('await ' + content[chain_start:last_exec_end + 1]) + i = last_exec_end + 1 + else: + result.append(content[i:m.end()]) + i = m.end() + + return ''.join(result) + + + +def transform_expect_throws(content): + """Transform expect(() => db.asyncMethod()).toThrow() patterns.""" + sync_only_methods = {'traverse', 'nodes', 'pattern'} + + # Pattern: expect(() => db.METHOD(...)).toThrow(...) + # Also: expect(() => db.transaction(async (ctx) => { ... })).toThrow(...) + + result = [] + i = 0 + pattern = re.compile(r'\bexpect\s*\(\s*\(\s*\)\s*=>\s*\{?') + + while i < len(content): + m = pattern.search(content, i) + if not m: + result.append(content[i:]) + break + + # Check what comes inside expect(() => ...) 
+ # We need to determine if the inner function calls an async method + after_arrow = content[m.end():] + + # Find the inner expression + # Case 1: expect(() => db.method(...)) - no braces + # Case 2: expect(() => { db.method(...); }) - with braces + + matched_end = m.end() + has_brace = m.group(0).endswith('{') + + if has_brace: + # Find matching } + brace_start = m.end() - 1 + brace_end = find_matching_brace(content, brace_start) + if brace_end == -1: + result.append(content[i:m.end()]) + i = m.end() + continue + inner = content[brace_start+1:brace_end] + after_close = content[brace_end+1:] + # Find closing ) of expect + expect_paren_close_match = re.match(r'\s*\)', after_close) + if not expect_paren_close_match: + result.append(content[i:m.end()]) + i = m.end() + continue + expect_close_end = brace_end + 1 + expect_paren_close_match.end() + else: + # No braces - find the ) of expect( + # The ( of expect( is the first ( in the match + abs_paren = m.start() + m.group(0).index('(') + paren_end = find_matching_paren(content, abs_paren) + if paren_end == -1: + result.append(content[i:m.end()]) + i = m.end() + continue + inner = content[m.end():paren_end] + expect_close_end = paren_end + 1 + brace_end = None + + # Check if inner contains async DB method calls (any db-like variable) + async_method_re = '|'.join(re.escape(m2) for m2 in ASYNC_DB_METHODS) + inner_has_async = bool(re.search(r'\b\w+\.(?:' + async_method_re + r')\s*\(', inner)) + inner_has_transaction = bool(re.search(r'\b\w+\.transaction\s*\(', inner)) + + if not (inner_has_async or inner_has_transaction): + result.append(content[i:matched_end]) + i = matched_end + continue + + # Now look for .toThrow() after expect(...) 
+ rest = content[expect_close_end:] + throw_match = re.match(r'\s*\.\s*toThrow\s*\(', rest) + if not throw_match: + result.append(content[i:matched_end]) + i = matched_end + continue + + throw_paren_start = expect_close_end + throw_match.end() - 1 + throw_paren_end = find_matching_paren(content, throw_paren_start) + if throw_paren_end == -1: + result.append(content[i:matched_end]) + i = matched_end + continue + + throw_arg = content[throw_paren_start+1:throw_paren_end] + + # Build the new expression + # Extract the actual call from inside expect(() => ...) + # Remove the () => wrapper + if has_brace: + # The inner is a block - extract the db call + # Find the primary db call in the block + async_call_m = re.search(r'(?:db\.(?:' + async_method_re + r'|transaction)\s*\()', inner) + if not async_call_m: + result.append(content[i:matched_end]) + i = matched_end + continue + call_start_in_inner = async_call_m.start() + # Find the full call + call_paren = inner.index('(', call_start_in_inner) + # Actually let's just extract the whole block differently + # For simplicity, just build: await expect((async () => { INNER })()).rejects.toThrow(ARG) + # Or better: extract the db.method(...) call + # Let's find the db.method(...) 
call in the block + inner_stripped = inner.strip() + # Remove leading/trailing whitespace and semicolons + call_match = re.search(r'(db\.(?:' + '|'.join(re.escape(x) for x in ASYNC_DB_METHODS) + r'|transaction)\s*\()', inner) + if call_match: + call_paren_pos_in_inner = inner.index('(', call_match.start()) + call_full_end_in_inner = find_matching_paren(inner, call_paren_pos_in_inner) + if call_full_end_in_inner != -1: + extracted_call = inner[call_match.start():call_full_end_in_inner+1] + # Check if the call is db.transaction with async callback + if 'transaction' in extracted_call and 'async' in extracted_call: + # Keep it as-is + pass + await_prefix = '' if needs_await_before(content, i) else 'await ' + new_code = f'{await_prefix}expect({extracted_call}).rejects.toThrow({throw_arg})' + result.append(content[i:m.start()]) + result.append(new_code) + i = throw_paren_end + 1 + continue + else: + # inner is the expression like: db.method(args) + # Strip leading/trailing whitespace + inner_stripped = inner.strip() + await_prefix = '' if needs_await_before(content, i) else 'await ' + new_code = f'{await_prefix}expect({inner_stripped}).rejects.toThrow({throw_arg})' + result.append(content[i:m.start()]) + result.append(new_code) + i = throw_paren_end + 1 + continue + + result.append(content[i:matched_end]) + i = matched_end + + return ''.join(result) + + +def make_callbacks_async(content): + """Make beforeEach, afterEach, and it() callbacks async where needed.""" + + def has_async_calls(body): + # Check if body has any async calls that need await + # Extended to include testDb., newDb., schemaDb., emptyDb. etc. 
+ obj_pattern = r'(?:db|testDb|newDb|schemaDb|emptyDb|ctx|db2|db3)' + async_method_re = r'\b' + obj_pattern + r'\.(?:' + '|'.join(re.escape(m) for m in ASYNC_DB_METHODS) + r')\s*\(' + traversal_re = r'\.(?:toArray|toPaths|shortestPath|paths|allPaths)\s*\(' + tx_re = r'\bdb\.transaction\s*\(' + + return (bool(re.search(async_method_re, body)) or + bool(re.search(r'\bdb\.nodes\s*\(', body)) or # might chain to exec + bool(re.search(traversal_re, body)) or + bool(re.search(tx_re, body))) + + # Transform beforeEach(() => { to beforeEach(async () => { + for hook in ['beforeEach', 'afterEach']: + result = [] + i = 0 + pattern = re.compile(r'\b' + hook + r'\s*\(\s*\(\s*\)\s*=>\s*\{') + while i < len(content): + m = pattern.search(content, i) + if not m: + result.append(content[i:]) + break + brace_pos = m.end() - 1 + brace_end = find_matching_brace(content, brace_pos) + if brace_end == -1: + result.append(content[i:m.end()]) + i = m.end() + continue + body = content[brace_pos+1:brace_end] + if 'async' in m.group(0): + result.append(content[i:m.end()]) + elif has_async_calls(body): + new_hook = m.group(0).replace('() =>', 'async () =>', 1) + result.append(content[i:m.start()]) + result.append(new_hook) + else: + result.append(content[i:m.end()]) + i = m.end() + content = ''.join(result) + + # Transform it('name', () => { to it('name', async () => { + result = [] + i = 0 + # Match it( or test( with string first arg, then () => { + it_pattern = re.compile(r'\b(it|test)\s*\(\s*(?:"[^"]*"|\'[^\']*\'|`[^`]*`)\s*,\s*\(\s*\)\s*=>\s*\{') + while i < len(content): + m = it_pattern.search(content, i) + if not m: + result.append(content[i:]) + break + brace_pos = m.end() - 1 + brace_end = find_matching_brace(content, brace_pos) + if brace_end == -1: + result.append(content[i:m.end()]) + i = m.end() + continue + body = content[brace_pos+1:brace_end] + if has_async_calls(body): + new_it = re.sub(r'\(\s*\)\s*=>\s*\{$', 'async () => {', m.group(0)) + result.append(content[i:m.start()]) + 
result.append(new_it) + else: + result.append(content[i:m.end()]) + i = m.end() + content = ''.join(result) + + # Final pass: fix any remaining arrow functions with await inside that aren't async + # Pattern: (params) => { ... await ... } or identifier => { ... await ... } + result = [] + i = 0 + # Match both (params) => { and identifier => { + arrow_block_pattern = re.compile(r'(\([^()]*\)|\b\w+)\s*(=>)\s*\{') + while i < len(content): + m = arrow_block_pattern.search(content, i) + if not m: + result.append(content[i:]) + break + pre = content[max(0, m.start()-10):m.start()].rstrip() + # Skip if already async, or if preceded by ':' (TypeScript return type annotation) + if pre.endswith('async') or pre.endswith(':') or re.search(r':\s*$', pre): + result.append(content[i:m.end()]) + i = m.end() + continue + # For bare identifier case (not parenthesized), check char before it + if not m.group(1).startswith('('): + char_before = content[max(0, m.start()-1):m.start()] + if char_before in (':', ' ') and re.search(r':\s*\w*$', content[max(0, m.start()-20):m.start()]): + result.append(content[i:m.end()]) + i = m.end() + continue + brace_pos = m.end() - 1 + brace_end = find_matching_brace(content, brace_pos) + if brace_end == -1: + result.append(content[i:m.end()]) + i = m.end() + continue + body = content[brace_pos+1:brace_end] + if re.search(r'\bawait\b', body): + params = m.group(1) + result.append(content[i:m.start()]) + result.append(f'async {params} => {{') + i = m.end() + else: + result.append(content[i:m.end()]) + i = m.end() + content = ''.join(result) + + # Fix expression arrow functions with await: (params) => await ... or id => await ... 
+ result = [] + i = 0 + arrow_expr_pattern = re.compile(r'(\([^()]*\)|\b\w+)\s*=>\s*(await\b)') + while i < len(content): + m = arrow_expr_pattern.search(content, i) + if not m: + result.append(content[i:]) + break + pre = content[max(0, m.start()-10):m.start()].rstrip() + if pre.endswith('async') or pre.endswith(':') or re.search(r':\s*$', pre): + result.append(content[i:m.end()]) + i = m.end() + continue + if not m.group(1).startswith('('): + if re.search(r':\s*\w*$', content[max(0, m.start()-20):m.start()]): + result.append(content[i:m.end()]) + i = m.end() + continue + params = m.group(1) + result.append(content[i:m.start()]) + result.append(f'async {params} => await') + i = m.start() + len(m.group(0)) + content = ''.join(result) + + return content + + +def transform_file(filepath, is_pattern_query=False): + with open(filepath, 'r') as f: + content = f.read() + + # Order matters: + # 1. Transform expect(() => async).toThrow() first (before adding await) + content = transform_expect_throws(content) + + # 2. Add await to direct db method calls + content = transform_db_methods(content) + + # 3. Transform transaction callbacks + content = transform_transaction(content) + + # 4. Add await to NodeQuery chains + if not is_pattern_query: + content = transform_nodequery_exec(content) + + # 5. Add await to TraversalQuery chains + content = transform_traversal_exec(content) + + # 6. Make callbacks async + content = make_callbacks_async(content) + + # 7. Wrap Array.from with async callback in Promise.all + # const X = Array.from({...}, async ...) -> const X = await Promise.all(Array.from({...}, async ...)) + content = re.sub( + r'(=\s*)(Array\.from\s*\(\s*\{[^}]*\}\s*,\s*async\s)', + lambda m: '= await Promise.all(' + m.group(2), + content + ) + # Close the extra paren - find each Array.from( that we wrapped and add closing ) + # This is handled by finding = await Promise.all(Array.from(...) 
and adding ) + # Actually, let's do it differently - line by line approach + # Fix: find lines with "await Promise.all(Array.from" that don't have matching close paren + + # Simpler: use regex to wrap complete Array.from(...) calls + # Reset - redo Array.from wrapping properly + content = re.sub( + r'(=\s*await Promise\.all\(Array\.from\s*\()', + '= await Promise.all(Array.from(', # undo the above + content + ) + # Proper Array.from wrapping: + result = [] + j = 0 + af_pattern = re.compile(r'((?:const|let|var)\s+\w+\s*=\s*)(Array\.from\s*\()') + while j < len(content): + mm = af_pattern.search(content, j) + if not mm: + result.append(content[j:]) + break + # Find the matching ) of Array.from( + paren_start = mm.start(2) + len('Array.from') + paren_start = content.index('(', mm.start(2)) + paren_end = find_matching_paren(content, paren_start) + if paren_end == -1: + result.append(content[j:mm.end()]) + j = mm.end() + continue + inner = content[mm.end():paren_end] + # Check if callback is async + if re.search(r',\s*async\s', inner) and not needs_await_before(content, mm.start(2)): + result.append(content[j:mm.start(2)]) + result.append('await Promise.all(Array.from(' + inner + '))') + j = paren_end + 1 + else: + result.append(content[j:mm.end()]) + j = mm.end() + content = ''.join(result) + + # Wrap .map(async ...) with await Promise.all(...) 
+ result = [] + j = 0 + map_pattern = re.compile(r'((?:const|let|var)\s+\w+\s*=\s*)(\w+\.map\s*\()') + while j < len(content): + mm = map_pattern.search(content, j) + if not mm: + result.append(content[j:]) + break + paren_start = content.index('(', mm.start(2)) + paren_end = find_matching_paren(content, paren_start) + if paren_end == -1: + result.append(content[j:mm.end()]) + j = mm.end() + continue + inner = content[mm.end():paren_end] + if re.search(r'^\s*async\s', inner) and not needs_await_before(content, mm.start(2)): + result.append(content[j:mm.start(2)]) + result.append('await Promise.all(' + mm.group(2)[:-1] + 'map(' + inner + '))') + j = paren_end + 1 + else: + result.append(content[j:mm.end()]) + j = mm.end() + content = ''.join(result) + + # Convert .forEach(async ...) to await Promise.all(...map(async ...)) + result = [] + j = 0 + foreach_pattern = re.compile(r'(\w+)\.forEach\s*\(') + while j < len(content): + mm = foreach_pattern.search(content, j) + if not mm: + result.append(content[j:]) + break + paren_start = mm.end() - 1 + paren_end = find_matching_paren(content, paren_start) + if paren_end == -1: + result.append(content[j:mm.end()]) + j = mm.end() + continue + inner = content[mm.end():paren_end] + if re.search(r'^\s*async\s', inner) and not needs_await_before(content, mm.start()): + arr = mm.group(1) + result.append(content[j:mm.start()]) + result.append(f'await Promise.all({arr}.map({inner}))') + j = paren_end + 1 + else: + result.append(content[j:mm.end()]) + j = mm.end() + content = ''.join(result) + + return content + + +if __name__ == '__main__': + filepath = sys.argv[1] + is_pattern = 'PatternQuery' in filepath + result = transform_file(filepath, is_pattern_query=is_pattern) + with open(filepath, 'w') as f: + f.write(result) + print(f"Transformed: {filepath}") diff --git a/tests/integration/graph-operations.test.ts b/tests/integration/graph-operations.test.ts index d235de9..3b9addf 100644 --- a/tests/integration/graph-operations.test.ts 
+++ b/tests/integration/graph-operations.test.ts @@ -6,104 +6,104 @@ import { GraphExport } from '../../src/types'; * Integration tests for complex multi-step graph operations. * Tests combining CRUD, queries, traversals, transactions, and data export/import. */ -describe('Complex Graph Operations - Integration Tests', () => { +describe('Complex Graph Operations - Integration Tests', async () => { let db: GraphDatabase; beforeEach(() => { db = new GraphDatabase(':memory:'); }); - afterEach(() => { - db.close(); + afterEach(async () => { + await db.close(); }); - describe('Multi-Step Graph Transformations', () => { - it('should perform complex graph transformation with multiple operations', () => { + describe('Multi-Step Graph Transformations', async () => { + it('should perform complex graph transformation with multiple operations', async () => { // Step 1: Create initial graph structure const nodes = { - a: db.createNode('Node', { label: 'A', value: 1 }), - b: db.createNode('Node', { label: 'B', value: 2 }), - c: db.createNode('Node', { label: 'C', value: 3 }), - d: db.createNode('Node', { label: 'D', value: 4 }) + a: await db.createNode('Node', { label: 'A', value: 1 }), + b: await db.createNode('Node', { label: 'B', value: 2 }), + c: await db.createNode('Node', { label: 'C', value: 3 }), + d: await db.createNode('Node', { label: 'D', value: 4 }) }; - db.createEdge(nodes.a.id, 'LINKS_TO', nodes.b.id, { weight: 1 }); - db.createEdge(nodes.b.id, 'LINKS_TO', nodes.c.id, { weight: 2 }); - db.createEdge(nodes.c.id, 'LINKS_TO', nodes.d.id, { weight: 3 }); - db.createEdge(nodes.d.id, 'LINKS_TO', nodes.a.id, { weight: 4 }); // Cycle + await db.createEdge(nodes.a.id, 'LINKS_TO', nodes.b.id, { weight: 1 }); + await db.createEdge(nodes.b.id, 'LINKS_TO', nodes.c.id, { weight: 2 }); + await db.createEdge(nodes.c.id, 'LINKS_TO', nodes.d.id, { weight: 3 }); + await db.createEdge(nodes.d.id, 'LINKS_TO', nodes.a.id, { weight: 4 }); // Cycle // Step 2: Query and transform - const 
allNodes = db.nodes('Node').exec(); + const allNodes = await db.nodes('Node').exec(); expect(allNodes).toHaveLength(4); // Step 3: Add metadata to all nodes - allNodes.forEach(node => { - const outgoing = db.traverse(node.id).out('LINKS_TO').toArray(); - const incoming = db.traverse(node.id).in('LINKS_TO').toArray(); + await Promise.all(allNodes.map(async node => { + const outgoing = await db.traverse(node.id).out('LINKS_TO').toArray(); + const incoming = await db.traverse(node.id).in('LINKS_TO').toArray(); - db.updateNode(node.id, { + await db.updateNode(node.id, { degree: outgoing.length + incoming.length, outDegree: outgoing.length, inDegree: incoming.length }); - }); + })); // Step 4: Verify transformations - const updatedA = db.getNode(nodes.a.id); + const updatedA = await db.getNode(nodes.a.id); expect(updatedA?.properties.degree).toBe(2); // 1 out, 1 in expect(updatedA?.properties.outDegree).toBe(1); expect(updatedA?.properties.inDegree).toBe(1); // Step 5: Add derived relationships - allNodes.forEach(node => { - const twoHopNeighbors = db.traverse(node.id) + await Promise.all(allNodes.map(async node => { + const twoHopNeighbors = await db.traverse(node.id) .out('LINKS_TO') .maxDepth(2) .minDepth(2) .toArray(); - twoHopNeighbors.forEach(neighbor => { + twoHopNeighbors.forEach(async neighbor => { // Create "indirect" relationship - db.createEdge(node.id, 'INDIRECT', neighbor.id, { hops: 2 }); + await db.createEdge(node.id, 'INDIRECT', neighbor.id, { hops: 2 }); }); - }); + })); // Step 6: Verify derived relationships - const indirectFromA = db.traverse(nodes.a.id) + const indirectFromA = await db.traverse(nodes.a.id) .out('INDIRECT') .toArray(); expect(indirectFromA.length).toBeGreaterThan(0); }); - it('should build and query hierarchical taxonomy', () => { + it('should build and query hierarchical taxonomy', async () => { // Build skill taxonomy - const programming = db.createNode('Category', { name: 'Programming', level: 0 }); - const languages = 
db.createNode('Category', { name: 'Languages', level: 1 }); - const frameworks = db.createNode('Category', { name: 'Frameworks', level: 1 }); + const programming = await db.createNode('Category', { name: 'Programming', level: 0 }); + const languages = await db.createNode('Category', { name: 'Languages', level: 1 }); + const frameworks = await db.createNode('Category', { name: 'Frameworks', level: 1 }); - db.createEdge(programming.id, 'PARENT_OF', languages.id); - db.createEdge(programming.id, 'PARENT_OF', frameworks.id); + await db.createEdge(programming.id, 'PARENT_OF', languages.id); + await db.createEdge(programming.id, 'PARENT_OF', frameworks.id); - const webLangs = db.createNode('Category', { name: 'Web Languages', level: 2 }); - const systemsLangs = db.createNode('Category', { name: 'Systems Languages', level: 2 }); + const webLangs = await db.createNode('Category', { name: 'Web Languages', level: 2 }); + const systemsLangs = await db.createNode('Category', { name: 'Systems Languages', level: 2 }); - db.createEdge(languages.id, 'PARENT_OF', webLangs.id); - db.createEdge(languages.id, 'PARENT_OF', systemsLangs.id); + await db.createEdge(languages.id, 'PARENT_OF', webLangs.id); + await db.createEdge(languages.id, 'PARENT_OF', systemsLangs.id); // Add actual skills - const js = db.createNode('Skill', { name: 'JavaScript' }); - const ts = db.createNode('Skill', { name: 'TypeScript' }); - const rust = db.createNode('Skill', { name: 'Rust' }); - const react = db.createNode('Skill', { name: 'React' }); + const js = await db.createNode('Skill', { name: 'JavaScript' }); + const ts = await db.createNode('Skill', { name: 'TypeScript' }); + const rust = await db.createNode('Skill', { name: 'Rust' }); + const react = await db.createNode('Skill', { name: 'React' }); - db.createEdge(js.id, 'BELONGS_TO', webLangs.id); - db.createEdge(ts.id, 'BELONGS_TO', webLangs.id); - db.createEdge(rust.id, 'BELONGS_TO', systemsLangs.id); - db.createEdge(react.id, 'BELONGS_TO', 
frameworks.id); + await db.createEdge(js.id, 'BELONGS_TO', webLangs.id); + await db.createEdge(ts.id, 'BELONGS_TO', webLangs.id); + await db.createEdge(rust.id, 'BELONGS_TO', systemsLangs.id); + await db.createEdge(react.id, 'BELONGS_TO', frameworks.id); // Query all skills under "Languages" category - const languageCategories = db.traverse(languages.id) + const languageCategories = await db.traverse(languages.id) .out('PARENT_OF') .toArray(); @@ -115,20 +115,19 @@ describe('Complex Graph Operations - Integration Tests', () => { while (categories.length > 0) { const catId = categories.shift()!; - const children = db.traverse(catId).out('PARENT_OF').toArray(); + const children = await db.traverse(catId).out('PARENT_OF').toArray(); categories.push(...children.map(c => c.id)); - const skills = db.traverse(catId).in('BELONGS_TO').toArray(); + const skills = await db.traverse(catId).in('BELONGS_TO').toArray(); allSkills.push(...skills); } expect(allSkills.length).toBeGreaterThan(0); }); - it('should handle complex filtering and aggregation', () => { + it('should handle complex filtering and aggregation', async () => { // Create diverse dataset - const companies = Array.from({ length: 10 }, (_, i) => - db.createNode('Company', { + const companies = await Promise.all(Array.from({ length: 10 }, async (_, i) => await db.createNode('Company', { name: `Company ${i}`, size: ['small', 'medium', 'large'][i % 3], founded: 2000 + i, @@ -136,8 +135,7 @@ describe('Complex Graph Operations - Integration Tests', () => { }) ); - const jobs = Array.from({ length: 30 }, (_, i) => - db.createNode('Job', { + const jobs = await Promise.all(Array.from({ length: 30 }, async (_, i) => await db.createNode('Job', { title: `Job ${i}`, salary: 80000 + i * 5000, remote: i % 2 === 0, @@ -146,22 +144,22 @@ describe('Complex Graph Operations - Integration Tests', () => { ); // Link jobs to companies - jobs.forEach((job, i) => { + await Promise.all(jobs.map(async (job, i) => { const company = 
companies[i % companies.length]; - db.createEdge(job.id, 'POSTED_BY', company.id); - }); + await db.createEdge(job.id, 'POSTED_BY', company.id); + })); // Complex aggregation: average salary by company size const sizeGroups = { small: [], medium: [], large: [] } as any; - companies.forEach(company => { - const companyJobs = db.traverse(company.id).in('POSTED_BY').toArray(); + await Promise.all(companies.map(async company => { + const companyJobs = await db.traverse(company.id).in('POSTED_BY').toArray(); const avgSalary = companyJobs.reduce((sum, job) => sum + job.properties.salary, 0) / companyJobs.length; const size = company.properties.size; sizeGroups[size].push(avgSalary); - }); + })); // Calculate averages const avgBySizeArray = Object.entries(sizeGroups).map(([size, salaries]: [string, any]) => ({ @@ -175,8 +173,8 @@ describe('Complex Graph Operations - Integration Tests', () => { }); // Find companies with high-paying jobs - const highPayCompanies = companies.filter(company => { - const companyJobs = db.traverse(company.id).in('POSTED_BY').toArray(); + const highPayCompanies = companies.filter(async company => { + const companyJobs = await db.traverse(company.id).in('POSTED_BY').toArray(); return companyJobs.some(job => job.properties.salary >= 150000); }); @@ -184,72 +182,72 @@ describe('Complex Graph Operations - Integration Tests', () => { }); }); - describe('Transaction Scenarios with Savepoints', () => { - it('should handle partial rollback with savepoints', () => { - const result = db.transaction(ctx => { + describe('Transaction Scenarios with Savepoints', async () => { + it('should handle partial rollback with savepoints', async () => { + const result = await db.transaction(async ctx => { // Create first batch - const job1 = db.createNode('Job', { title: 'Job 1', batch: 1 }); - const job2 = db.createNode('Job', { title: 'Job 2', batch: 1 }); + const job1 = await db.createNode('Job', { title: 'Job 1', batch: 1 }); + const job2 = await 
db.createNode('Job', { title: 'Job 2', batch: 1 }); ctx.savepoint('batch1'); // Create second batch - const job3 = db.createNode('Job', { title: 'Job 3', batch: 2 }); - const job4 = db.createNode('Job', { title: 'Job 4', batch: 2 }); + const job3 = await db.createNode('Job', { title: 'Job 3', batch: 2 }); + const job4 = await db.createNode('Job', { title: 'Job 4', batch: 2 }); ctx.savepoint('batch2'); // Create third batch - const job5 = db.createNode('Job', { title: 'Job 5', batch: 3 }); + const job5 = await db.createNode('Job', { title: 'Job 5', batch: 3 }); // Rollback third batch ctx.rollbackTo('batch2'); // Create replacement for batch 3 - const job6 = db.createNode('Job', { title: 'Job 6', batch: 3 }); + const job6 = await db.createNode('Job', { title: 'Job 6', batch: 3 }); return { job1, job2, job3, job4, job6 }; }); // Verify Job 5 was rolled back, Job 6 exists - const allJobs = db.nodes('Job').exec(); + const allJobs = await db.nodes('Job').exec(); expect(allJobs).toHaveLength(5); const titles = allJobs.map(j => j.properties.title).sort(); expect(titles).toEqual(['Job 1', 'Job 2', 'Job 3', 'Job 4', 'Job 6']); }); - it('should handle complex transaction with error recovery', () => { - const result = db.transaction(ctx => { - const company = db.createNode('Company', { name: 'Test Company' }); + it('should handle complex transaction with error recovery', async () => { + const result = await db.transaction(async ctx => { + const company = await db.createNode('Company', { name: 'Test Company' }); ctx.savepoint('company_created'); try { - const job1 = db.createNode('Job', { title: 'Job 1' }); - db.createEdge(job1.id, 'POSTED_BY', company.id); + const job1 = await db.createNode('Job', { title: 'Job 1' }); + await db.createEdge(job1.id, 'POSTED_BY', company.id); ctx.savepoint('job1_created'); - const job2 = db.createNode('Job', { title: 'Job 2' }); + const job2 = await db.createNode('Job', { title: 'Job 2' }); // Simulate error condition if (job2.properties.title 
=== 'Job 2') { throw new Error('Simulated error'); } - db.createEdge(job2.id, 'POSTED_BY', company.id); + await db.createEdge(job2.id, 'POSTED_BY', company.id); } catch (error) { // Rollback to after job1 ctx.rollbackTo('job1_created'); // Create alternative job2 - const job2Alt = db.createNode('Job', { title: 'Job 2 Alt' }); - db.createEdge(job2Alt.id, 'POSTED_BY', company.id); + const job2Alt = await db.createNode('Job', { title: 'Job 2 Alt' }); + await db.createEdge(job2Alt.id, 'POSTED_BY', company.id); } return company.id; }); // Verify recovery worked - const jobs = db.traverse(result).in('POSTED_BY').toArray(); + const jobs = await db.traverse(result).in('POSTED_BY').toArray(); expect(jobs).toHaveLength(2); const titles = jobs.map(j => j.properties.title).sort(); @@ -258,75 +256,73 @@ describe('Complex Graph Operations - Integration Tests', () => { expect(titles).not.toContain('Job 2'); }); - it('should support nested transaction-like operations', () => { - db.transaction(ctx => { + it('should support nested transaction-like operations', async () => { + await db.transaction(async ctx => { // Outer operation - const project = db.createNode('Project', { name: 'Project A' }); + const project = await db.createNode('Project', { name: 'Project A' }); ctx.savepoint('project'); // Inner operation 1 - db.transaction(innerCtx => { - const task1 = db.createNode('Task', { title: 'Task 1' }); - db.createEdge(task1.id, 'PART_OF', project.id); + db.transaction(async innerCtx => { + const task1 = await db.createNode('Task', { title: 'Task 1' }); + await db.createEdge(task1.id, 'PART_OF', project.id); }); ctx.savepoint('task1'); // Inner operation 2 - db.transaction(innerCtx => { - const task2 = db.createNode('Task', { title: 'Task 2' }); - db.createEdge(task2.id, 'PART_OF', project.id); + db.transaction(async innerCtx => { + const task2 = await db.createNode('Task', { title: 'Task 2' }); + await db.createEdge(task2.id, 'PART_OF', project.id); }); // Verify both tasks exist - 
const tasks = db.traverse(project.id).in('PART_OF').toArray(); + const tasks = await db.traverse(project.id).in('PART_OF').toArray(); expect(tasks).toHaveLength(2); }); // Verify everything committed - const projects = db.nodes('Project').exec(); + const projects = await db.nodes('Project').exec(); expect(projects).toHaveLength(1); - const allTasks = db.nodes('Task').exec(); + const allTasks = await db.nodes('Task').exec(); expect(allTasks).toHaveLength(2); }); - it('should rollback entire transaction on error', () => { - expect(() => { - db.transaction(ctx => { - const node1 = db.createNode('Node', { label: 'Node 1' }); - const node2 = db.createNode('Node', { label: 'Node 2' }); + it('should rollback entire transaction on error', async () => { + await expect(db.transaction(async ctx => { + const node1 = await db.createNode('Node', { label: 'Node 1' }); + const node2 = await db.createNode('Node', { label: 'Node 2' }); - db.createEdge(node1.id, 'LINKS', node2.id); + await db.createEdge(node1.id, 'LINKS', node2.id); // Create savepoint ctx.savepoint('after_nodes'); - const node3 = db.createNode('Node', { label: 'Node 3' }); + const node3 = await db.createNode('Node', { label: 'Node 3' }); // Throw error - should rollback everything throw new Error('Transaction failed'); - }); - }).toThrow('Transaction failed'); + })).rejects.toThrow('Transaction failed'); // Verify nothing was committed - const nodes = db.nodes('Node').exec(); + const nodes = await db.nodes('Node').exec(); expect(nodes).toHaveLength(0); }); }); - describe('Export and Import Operations', () => { - it('should export and import complete graph', () => { + describe('Export and Import Operations', async () => { + it('should export and import complete graph', async () => { // Create original graph - const company = db.createNode('Company', { name: 'TestCorp', size: 'medium' }); - const job = db.createNode('Job', { title: 'Engineer', salary: 150000 }); - const skill = db.createNode('Skill', { name: 
'TypeScript', category: 'programming' }); + const company = await db.createNode('Company', { name: 'TestCorp', size: 'medium' }); + const job = await db.createNode('Job', { title: 'Engineer', salary: 150000 }); + const skill = await db.createNode('Skill', { name: 'TypeScript', category: 'programming' }); - db.createEdge(job.id, 'POSTED_BY', company.id); - db.createEdge(job.id, 'REQUIRES', skill.id, { level: 'expert' }); + await db.createEdge(job.id, 'POSTED_BY', company.id); + await db.createEdge(job.id, 'REQUIRES', skill.id, { level: 'expert' }); // Export - const exported = db.export(); + const exported = await db.export(); // Verify export structure expect(exported.nodes).toHaveLength(3); @@ -336,14 +332,14 @@ describe('Complex Graph Operations - Integration Tests', () => { // Create new database and import const db2 = new GraphDatabase(':memory:'); - db2.import(exported); + await db2.import(exported); // Verify imported data - const importedCompanies = db2.nodes('Company').exec(); + const importedCompanies = await db2.nodes('Company').exec(); expect(importedCompanies).toHaveLength(1); expect(importedCompanies[0].properties.name).toBe('TestCorp'); - const importedJobs = db2.nodes('Job').exec(); + const importedJobs = await db2.nodes('Job').exec(); expect(importedJobs).toHaveLength(1); // Verify relationships @@ -352,27 +348,26 @@ describe('Complex Graph Operations - Integration Tests', () => { .toArray(); expect(jobCompanies).toHaveLength(1); - db2.close(); + await db2.close(); }); - it('should handle large graph export/import', () => { + it('should handle large graph export/import', async () => { // Create larger graph const nodeCount = 100; - const nodes = Array.from({ length: nodeCount }, (_, i) => - db.createNode('Node', { index: i, value: Math.random() }) + const nodes = await Promise.all(Array.from({ length: nodeCount }, async (_, i) => await db.createNode('Node', { index: i, value: Math.random() })) ); // Create edges (each node connects to next 3) -
nodes.forEach((node, i) => { + await Promise.all(nodes.map(async (node, i) => { for (let j = 1; j <= 3; j++) { const targetIndex = (i + j) % nodeCount; - db.createEdge(node.id, 'LINKS', nodes[targetIndex].id, { weight: j }); + await db.createEdge(node.id, 'LINKS', nodes[targetIndex].id, { weight: j }); } - }); + })); // Export const startExport = Date.now(); - const exported = db.export(); + const exported = await db.export(); const exportTime = Date.now() - startExport; expect(exported.nodes).toHaveLength(nodeCount); @@ -381,11 +376,11 @@ describe('Complex Graph Operations - Integration Tests', () => { // Import to new database const db2 = new GraphDatabase(':memory:'); const startImport = Date.now(); - db2.import(exported); + await db2.import(exported); const importTime = Date.now() - startImport; // Verify - const importedNodes = db2.nodes('Node').exec(); + const importedNodes = await db2.nodes('Node').exec(); expect(importedNodes).toHaveLength(nodeCount); // Verify random node's connections @@ -401,11 +396,11 @@ describe('Complex Graph Operations - Integration Tests', () => { expect(exportTime).toBeLessThan(1000); expect(importTime).toBeLessThan(2000); - db2.close(); + await db2.close(); }); - it('should preserve data types in export/import', () => { - const node = db.createNode('Test', { + it('should preserve data types in export/import', async () => { + const node = await db.createNode('Test', { string: 'hello', number: 42, boolean: true, @@ -415,11 +410,11 @@ describe('Complex Graph Operations - Integration Tests', () => { date: new Date().toISOString() }); - const exported = db.export(); + const exported = await db.export(); const db2 = new GraphDatabase(':memory:'); - db2.import(exported); + await db2.import(exported); - const imported = db2.nodes('Test').first(); + const imported = await db2.nodes('Test').first(); expect(imported).toBeTruthy(); expect(imported!.properties.string).toBe('hello'); expect(imported!.properties.number).toBe(42); @@ -429,62 
+424,62 @@ describe('Complex Graph Operations - Integration Tests', () => { expect(imported!.properties.object).toEqual({ nested: 'value' }); expect(imported!.properties.date).toBeDefined(); - db2.close(); + await db2.close(); }); - it('should handle incremental import without duplicates', () => { + it('should handle incremental import without duplicates', async () => { // Initial data - const company = db.createNode('Company', { name: 'Company A', id: 'comp-a' }); - const job1 = db.createNode('Job', { title: 'Job 1', id: 'job-1' }); - db.createEdge(job1.id, 'POSTED_BY', company.id); + const company = await db.createNode('Company', { name: 'Company A', id: 'comp-a' }); + const job1 = await db.createNode('Job', { title: 'Job 1', id: 'job-1' }); + await db.createEdge(job1.id, 'POSTED_BY', company.id); - const export1 = db.export(); + const export1 = await db.export(); // Add more data - const job2 = db.createNode('Job', { title: 'Job 2', id: 'job-2' }); - db.createEdge(job2.id, 'POSTED_BY', company.id); + const job2 = await db.createNode('Job', { title: 'Job 2', id: 'job-2' }); + await db.createEdge(job2.id, 'POSTED_BY', company.id); // Export again - this will include all data - const export2 = db.export(); + const export2 = await db.export(); // New database with first export const db2 = new GraphDatabase(':memory:'); - db2.import(export1); + await db2.import(export1); // Import second export (will have duplicates) - db2.import(export2); + await db2.import(export2); // Count nodes - will have duplicates because import doesn't check - const companies = db2.nodes('Company').exec(); - const jobs = db2.nodes('Job').exec(); + const companies = await db2.nodes('Company').exec(); + const jobs = await db2.nodes('Job').exec(); // This shows import creates duplicates - would need application-level deduplication expect(companies.length).toBeGreaterThanOrEqual(1); expect(jobs.length).toBeGreaterThanOrEqual(2); - db2.close(); + await db2.close(); }); }); - describe('Path 
Finding and Graph Algorithms', () => { - it('should find shortest path between nodes', () => { + describe('Path Finding and Graph Algorithms', async () => { + it('should find shortest path between nodes', async () => { // Create graph: A -> B -> C -> D // \ / // -> E ---- - const a = db.createNode('Node', { label: 'A' }); - const b = db.createNode('Node', { label: 'B' }); - const c = db.createNode('Node', { label: 'C' }); - const d = db.createNode('Node', { label: 'D' }); - const e = db.createNode('Node', { label: 'E' }); - - db.createEdge(a.id, 'LINKS', b.id); - db.createEdge(b.id, 'LINKS', c.id); - db.createEdge(c.id, 'LINKS', d.id); - db.createEdge(a.id, 'LINKS', e.id); - db.createEdge(e.id, 'LINKS', d.id); + const a = await db.createNode('Node', { label: 'A' }); + const b = await db.createNode('Node', { label: 'B' }); + const c = await db.createNode('Node', { label: 'C' }); + const d = await db.createNode('Node', { label: 'D' }); + const e = await db.createNode('Node', { label: 'E' }); + + await db.createEdge(a.id, 'LINKS', b.id); + await db.createEdge(b.id, 'LINKS', c.id); + await db.createEdge(c.id, 'LINKS', d.id); + await db.createEdge(a.id, 'LINKS', e.id); + await db.createEdge(e.id, 'LINKS', d.id); // Find shortest path from A to D - const path = db.traverse(a.id).shortestPath(d.id); + const path = await db.traverse(a.id).shortestPath(d.id); expect(path).toBeDefined(); expect(path!.length).toBe(3); // A -> E -> D (or A -> B -> C -> D) @@ -494,15 +489,15 @@ describe('Complex Graph Operations - Integration Tests', () => { expect(labels[labels.length - 1]).toBe('D'); }); - it('should detect cycles in graph', () => { + it('should detect cycles in graph', async () => { // Create cycle: A -> B -> C -> A - const a = db.createNode('Node', { label: 'A' }); - const b = db.createNode('Node', { label: 'B' }); - const c = db.createNode('Node', { label: 'C' }); + const a = await db.createNode('Node', { label: 'A' }); + const b = await db.createNode('Node', { label: 'B' 
}); + const c = await db.createNode('Node', { label: 'C' }); - db.createEdge(a.id, 'LINKS', b.id); - db.createEdge(b.id, 'LINKS', c.id); - db.createEdge(c.id, 'LINKS', a.id); + await db.createEdge(a.id, 'LINKS', b.id); + await db.createEdge(b.id, 'LINKS', c.id); + await db.createEdge(c.id, 'LINKS', a.id); // Traverse with cycle detection (limited depth) const visited = new Set(); @@ -511,7 +506,7 @@ describe('Complex Graph Operations - Integration Tests', () => { if (visited.has(nodeId)) return true; visited.add(nodeId); - const neighbors = db.traverse(nodeId).out('LINKS').toArray(); + const neighbors = await db.traverse(nodeId).out('LINKS').toArray(); for (const neighbor of neighbors) { if (hasCycle(neighbor.id, depth + 1)) return true; @@ -523,21 +518,21 @@ describe('Complex Graph Operations - Integration Tests', () => { expect(hasCycle(a.id, 0)).toBe(true); }); - it('should find all paths between nodes', () => { + it('should find all paths between nodes', async () => { // Create diamond graph: A -> B -> D // \-> C ->/ - const a = db.createNode('Node', { label: 'A' }); - const b = db.createNode('Node', { label: 'B' }); - const c = db.createNode('Node', { label: 'C' }); - const d = db.createNode('Node', { label: 'D' }); + const a = await db.createNode('Node', { label: 'A' }); + const b = await db.createNode('Node', { label: 'B' }); + const c = await db.createNode('Node', { label: 'C' }); + const d = await db.createNode('Node', { label: 'D' }); - db.createEdge(a.id, 'LINKS', b.id); - db.createEdge(a.id, 'LINKS', c.id); - db.createEdge(b.id, 'LINKS', d.id); - db.createEdge(c.id, 'LINKS', d.id); + await db.createEdge(a.id, 'LINKS', b.id); + await db.createEdge(a.id, 'LINKS', c.id); + await db.createEdge(b.id, 'LINKS', d.id); + await db.createEdge(c.id, 'LINKS', d.id); // Find all paths using traversal - const paths = db.traverse(a.id) + const paths = await db.traverse(a.id) .out('LINKS') .maxDepth(3) .paths(d.id); @@ -553,28 +548,27 @@ describe('Complex Graph 
Operations - Integration Tests', () => { expect(pathStrings).toContainEqual('A->C->D'); }); - it('should calculate node centrality', () => { + it('should calculate node centrality', async () => { // Create star topology: Center connected to 5 outer nodes - const center = db.createNode('Node', { label: 'Center' }); - const outer = Array.from({ length: 5 }, (_, i) => - db.createNode('Node', { label: `Outer ${i}` }) + const center = await db.createNode('Node', { label: 'Center' }); + const outer = await Promise.all(Array.from({ length: 5 }, async (_, i) => await db.createNode('Node', { label: `Outer ${i}` })) ); - outer.forEach(node => { - db.createEdge(center.id, 'LINKS', node.id); - db.createEdge(node.id, 'LINKS', center.id); - }); + await Promise.all(outer.map(async node => { + await db.createEdge(center.id, 'LINKS', node.id); + await db.createEdge(node.id, 'LINKS', center.id); + })); // Calculate degree centrality - const allNodes = db.nodes('Node').exec(); - const centrality = allNodes.map(node => { - const outgoing = db.traverse(node.id).out('LINKS').toArray(); - const incoming = db.traverse(node.id).in('LINKS').toArray(); + const allNodes = await db.nodes('Node').exec(); + const centrality = await Promise.all(allNodes.map(async node => { + const outgoing = await db.traverse(node.id).out('LINKS').toArray(); + const incoming = await db.traverse(node.id).in('LINKS').toArray(); return { label: node.properties.label, degree: outgoing.length + incoming.length }; - }); + })); const centerNode = centrality.find(n => n.label === 'Center'); expect(centerNode?.degree).toBe(10); // 5 out + 5 in @@ -586,69 +580,67 @@ describe('Complex Graph Operations - Integration Tests', () => { }); }); - describe('Data Integrity Under Stress', () => { - it('should maintain consistency with rapid updates', () => { - const node = db.createNode('Counter', { value: 0 }); + describe('Data Integrity Under Stress', async () => { + it('should maintain consistency with rapid updates', async ()
=> { + const node = await db.createNode('Counter', { value: 0 }); // Simulate rapid updates - db.transaction(() => { + await db.transaction(async () => { for (let i = 0; i < 100; i++) { - const current = db.getNode(node.id); - db.updateNode(node.id, { value: current!.properties.value + 1 }); + const current = await db.getNode(node.id); + await db.updateNode(node.id, { value: current!.properties.value + 1 }); } }); - const final = db.getNode(node.id); + const final = await db.getNode(node.id); expect(final?.properties.value).toBe(100); }); - it('should handle complex concurrent operations in transaction', () => { - db.transaction(() => { - const nodes = Array.from({ length: 10 }, (_, i) => - db.createNode('Node', { index: i }) + it('should handle complex concurrent operations in transaction', async () => { + await db.transaction(async () => { + const nodes = await Promise.all(Array.from({ length: 10 }, async (_, i) => await db.createNode('Node', { index: i })) ); // Create all possible edges - nodes.forEach((from, i) => { - nodes.forEach((to, j) => { + await Promise.all(nodes.map(async (from, i) => { + nodes.forEach(async (to, j) => { if (i !== j) { - db.createEdge(from.id, 'LINKS', to.id); + await db.createEdge(from.id, 'LINKS', to.id); } }); - }); + })); // Query while still in transaction - const allNodes = db.nodes('Node').exec(); + const allNodes = await db.nodes('Node').exec(); expect(allNodes).toHaveLength(10); // Each node should have 9 outgoing edges - nodes.forEach(node => { - const outgoing = db.traverse(node.id).out('LINKS').toArray(); + await Promise.all(nodes.map(async node => { + const outgoing = await db.traverse(node.id).out('LINKS').toArray(); expect(outgoing).toHaveLength(9); - }); + })); }); }); - it('should validate graph invariants after operations', () => { + it('should validate graph invariants after operations', async () => { // Create graph with constraints - const root = db.createNode('Root', { value: 'root' }); - const children =
Array.from({ length: 5 }, (_, i) => - db.createNode('Child', { value: `child-${i}`, parent: root.id }) + const root = await db.createNode('Root', { value: 'root' }); + const children = await Promise.all(Array.from({ length: 5 }, async (_, i) => await db.createNode('Child', { value: `child-${i}`, parent: root.id })) ); - children.forEach(child => { - db.createEdge(root.id, 'PARENT_OF', child.id); - }); + await Promise.all(children.map(async child => { + await db.createEdge(root.id, 'PARENT_OF', child.id); + })); // Invariant: Each child has exactly one parent - children.forEach(child => { - const parents = db.traverse(child.id).in('PARENT_OF').toArray(); + await Promise.all(children.map(async child => { + const parents = await db.traverse(child.id).in('PARENT_OF').toArray(); expect(parents).toHaveLength(1); expect(parents[0].id).toBe(root.id); - }); + })); // Invariant: Root has exactly 5 children - const rootChildren = db.traverse(root.id).out('PARENT_OF').toArray(); + const rootChildren = await db.traverse(root.id).out('PARENT_OF').toArray(); expect(rootChildren).toHaveLength(5); }); }); diff --git a/tests/integration/job-pipeline.test.ts b/tests/integration/job-pipeline.test.ts index 6dc5401..7dbee1e 100644 --- a/tests/integration/job-pipeline.test.ts +++ b/tests/integration/job-pipeline.test.ts @@ -6,28 +6,28 @@ import { Node } from '../../src/types'; * Integration tests for complete job application tracking workflows. * Tests real-world scenarios combining CRUD operations, queries, and traversals.
*/ -describe('Job Application Pipeline - Integration Tests', () => { +describe('Job Application Pipeline - Integration Tests', async () => { let db: GraphDatabase; beforeEach(() => { db = new GraphDatabase(':memory:'); }); - afterEach(() => { - db.close(); + afterEach(async () => { + await db.close(); }); - describe('Complete Job Discovery to Application Workflow', () => { - it('should track complete job application lifecycle', () => { + describe('Complete Job Discovery to Application Workflow', async () => { + it('should track complete job application lifecycle', async () => { // 1. Create companies - const techCorp = db.createNode('Company', { + const techCorp = await db.createNode('Company', { name: 'TechCorp', size: 'large', industry: 'Technology', location: 'San Francisco' }); - const startupInc = db.createNode('Company', { + const startupInc = await db.createNode('Company', { name: 'Startup Inc', size: 'small', industry: 'Technology', @@ -35,13 +35,13 @@ describe('Job Application Pipeline - Integration Tests', () => { }); // 2. Create skills - const typescript = db.createNode('Skill', { name: 'TypeScript', category: 'programming' }); - const react = db.createNode('Skill', { name: 'React', category: 'framework' }); - const nodejs = db.createNode('Skill', { name: 'Node.js', category: 'runtime' }); - const graphql = db.createNode('Skill', { name: 'GraphQL', category: 'api' }); + const typescript = await db.createNode('Skill', { name: 'TypeScript', category: 'programming' }); + const react = await db.createNode('Skill', { name: 'React', category: 'framework' }); + const nodejs = await db.createNode('Skill', { name: 'Node.js', category: 'runtime' }); + const graphql = await db.createNode('Skill', { name: 'GraphQL', category: 'api' }); // 3. 
Create jobs with discovery metadata - const seniorJob = db.createNode('Job', { + const seniorJob = await db.createNode('Job', { title: 'Senior Full Stack Engineer', status: 'discovered', salary: { min: 150000, max: 200000 }, @@ -50,7 +50,7 @@ describe('Job Application Pipeline - Integration Tests', () => { discoveredAt: new Date().toISOString() }); - const midLevelJob = db.createNode('Job', { + const midLevelJob = await db.createNode('Job', { title: 'Mid-Level Backend Engineer', status: 'discovered', salary: { min: 120000, max: 160000 }, @@ -60,19 +60,19 @@ describe('Job Application Pipeline - Integration Tests', () => { }); // 4. Create relationships - db.createEdge(seniorJob.id, 'POSTED_BY', techCorp.id); - db.createEdge(midLevelJob.id, 'POSTED_BY', startupInc.id); + await db.createEdge(seniorJob.id, 'POSTED_BY', techCorp.id); + await db.createEdge(midLevelJob.id, 'POSTED_BY', startupInc.id); - db.createEdge(seniorJob.id, 'REQUIRES', typescript.id, { level: 'expert', required: true }); - db.createEdge(seniorJob.id, 'REQUIRES', react.id, { level: 'expert', required: true }); - db.createEdge(seniorJob.id, 'REQUIRES', nodejs.id, { level: 'advanced', required: true }); - db.createEdge(seniorJob.id, 'REQUIRES', graphql.id, { level: 'intermediate', required: false }); + await db.createEdge(seniorJob.id, 'REQUIRES', typescript.id, { level: 'expert', required: true }); + await db.createEdge(seniorJob.id, 'REQUIRES', react.id, { level: 'expert', required: true }); + await db.createEdge(seniorJob.id, 'REQUIRES', nodejs.id, { level: 'advanced', required: true }); + await db.createEdge(seniorJob.id, 'REQUIRES', graphql.id, { level: 'intermediate', required: false }); - db.createEdge(midLevelJob.id, 'REQUIRES', nodejs.id, { level: 'intermediate', required: true }); - db.createEdge(midLevelJob.id, 'REQUIRES', graphql.id, { level: 'beginner', required: false }); + await db.createEdge(midLevelJob.id, 'REQUIRES', nodejs.id, { level: 'intermediate', required: true }); + await 
db.createEdge(midLevelJob.id, 'REQUIRES', graphql.id, { level: 'beginner', required: false }); // 5. Query for suitable jobs (remote, good salary) - const discoveredJobs = db.nodes('Job') + const discoveredJobs = await db.nodes('Job') .where({ status: 'discovered' }) .exec(); @@ -85,7 +85,7 @@ describe('Job Application Pipeline - Integration Tests', () => { expect(suitableJobs[0].properties.title).toBe('Senior Full Stack Engineer'); // 6. Update job status to 'interested' - const interestedJob = db.updateNode(seniorJob.id, { + const interestedJob = await db.updateNode(seniorJob.id, { status: 'interested', reviewedAt: new Date().toISOString(), notes: 'Great fit - strong TypeScript and React focus' @@ -94,7 +94,7 @@ describe('Job Application Pipeline - Integration Tests', () => { expect(interestedJob.properties.status).toBe('interested'); // 7. Find company details for the interested job - const jobsWithCompanies = db.nodes('Job') + const jobsWithCompanies = await db.nodes('Job') .where({ status: 'interested' }) .connectedTo('Company', 'POSTED_BY', 'out') .exec(); @@ -102,7 +102,7 @@ describe('Job Application Pipeline - Integration Tests', () => { expect(jobsWithCompanies).toHaveLength(1); // 8. Get company information through traversal - const companyNodes = db.traverse(seniorJob.id) + const companyNodes = await db.traverse(seniorJob.id) .out('POSTED_BY') .toArray(); @@ -110,7 +110,7 @@ describe('Job Application Pipeline - Integration Tests', () => { expect(companyNodes[0].properties.name).toBe('TechCorp'); // 9. Get all required skills for this job - const requiredSkills = db.traverse(seniorJob.id) + const requiredSkills = await db.traverse(seniorJob.id) .out('REQUIRES') .toArray(); @@ -121,14 +121,14 @@ describe('Job Application Pipeline - Integration Tests', () => { expect(skillNames).toContain('Node.js'); // 10. 
Update to 'applied' status - db.updateNode(seniorJob.id, { + await db.updateNode(seniorJob.id, { status: 'applied', appliedAt: new Date().toISOString(), applicationMethod: 'direct_website' }); // 11. Verify application tracking - const appliedJobs = db.nodes('Job') + const appliedJobs = await db.nodes('Job') .where({ status: 'applied' }) .exec(); @@ -136,24 +136,24 @@ describe('Job Application Pipeline - Integration Tests', () => { expect(appliedJobs[0].properties.applicationMethod).toBe('direct_website'); }); - it('should handle complete application rejection workflow', () => { - db.transaction(() => { + it('should handle complete application rejection workflow', async () => { + await db.transaction(async () => { // Create job and application - const company = db.createNode('Company', { name: 'RejectionCorp' }); - const job = db.createNode('Job', { + const company = await db.createNode('Company', { name: 'RejectionCorp' }); + const job = await db.createNode('Job', { title: 'Software Engineer', status: 'discovered' }); - db.createEdge(job.id, 'POSTED_BY', company.id); + await db.createEdge(job.id, 'POSTED_BY', company.id); // Move through application stages - db.updateNode(job.id, { status: 'interested' }); - db.updateNode(job.id, { status: 'applied', appliedAt: new Date().toISOString() }); - db.updateNode(job.id, { status: 'interviewing', interviewStage: 'technical' }); + await db.updateNode(job.id, { status: 'interested' }); + await db.updateNode(job.id, { status: 'applied', appliedAt: new Date().toISOString() }); + await db.updateNode(job.id, { status: 'interviewing', interviewStage: 'technical' }); // Handle rejection - db.updateNode(job.id, { + await db.updateNode(job.id, { status: 'rejected', rejectedAt: new Date().toISOString(), rejectionReason: 'Not moving forward after technical round', @@ -161,7 +161,7 @@ describe('Job Application Pipeline - Integration Tests', () => { }); // Verify rejection tracking - const rejectedJobs = db.nodes('Job') + const 
rejectedJobs = await db.nodes('Job') .where({ status: 'rejected' }) .exec(); @@ -171,7 +171,7 @@ describe('Job Application Pipeline - Integration Tests', () => { }); }); - it('should track multiple applications with different statuses', () => { + it('should track multiple applications with different statuses', async () => { // Create diverse job pipeline const jobs = [ { title: 'Job A', status: 'discovered', salary: { min: 120000 } }, @@ -183,15 +183,15 @@ describe('Job Application Pipeline - Integration Tests', () => { { title: 'Job G', status: 'withdrawn', salary: { min: 125000 } } ]; - const company = db.createNode('Company', { name: 'TestCorp' }); + const company = await db.createNode('Company', { name: 'TestCorp' }); - jobs.forEach(jobData => { - const job = db.createNode('Job', jobData); - db.createEdge(job.id, 'POSTED_BY', company.id); - }); + await Promise.all(jobs.map(async jobData => { + const job = await db.createNode('Job', jobData); + await db.createEdge(job.id, 'POSTED_BY', company.id); + })); // Query active pipeline (not rejected/withdrawn) - const activePipeline = db.nodes('Job') + const activePipeline = await db.nodes('Job') .filter(node => { const status = node.properties.status; return status !== 'rejected' && status !== 'withdrawn'; @@ -201,12 +201,12 @@ describe('Job Application Pipeline - Integration Tests', () => { expect(activePipeline).toHaveLength(5); // Query by specific stage - const interviewed = db.nodes('Job').where({ status: 'interviewing' }).exec(); + const interviewed = await db.nodes('Job').where({ status: 'interviewing' }).exec(); expect(interviewed).toHaveLength(1); expect(interviewed[0].properties.title).toBe('Job D'); // Query high-value opportunities (>= 150k) - const highValue = db.nodes('Job') + const highValue = await db.nodes('Job') .filter(node => { const salary = node.properties.salary; return salary && salary.min >= 150000; @@ -217,44 +217,44 @@ describe('Job Application Pipeline - Integration Tests', () => { }); }); 
- describe('Skill Matching and Job Discovery', () => { - it('should find jobs matching skill requirements', () => { + describe('Skill Matching and Job Discovery', async () => { + it('should find jobs matching skill requirements', async () => { // Create skills - const ts = db.createNode('Skill', { name: 'TypeScript' }); - const react = db.createNode('Skill', { name: 'React' }); - const python = db.createNode('Skill', { name: 'Python' }); - const go = db.createNode('Skill', { name: 'Go' }); + const ts = await db.createNode('Skill', { name: 'TypeScript' }); + const react = await db.createNode('Skill', { name: 'React' }); + const python = await db.createNode('Skill', { name: 'Python' }); + const go = await db.createNode('Skill', { name: 'Go' }); // Create jobs with different skill requirements - const frontendJob = db.createNode('Job', { + const frontendJob = await db.createNode('Job', { title: 'Frontend Engineer', status: 'active' }); - db.createEdge(frontendJob.id, 'REQUIRES', ts.id); - db.createEdge(frontendJob.id, 'REQUIRES', react.id); + await db.createEdge(frontendJob.id, 'REQUIRES', ts.id); + await db.createEdge(frontendJob.id, 'REQUIRES', react.id); - const backendJob = db.createNode('Job', { + const backendJob = await db.createNode('Job', { title: 'Backend Engineer', status: 'active' }); - db.createEdge(backendJob.id, 'REQUIRES', python.id); - db.createEdge(backendJob.id, 'REQUIRES', go.id); + await db.createEdge(backendJob.id, 'REQUIRES', python.id); + await db.createEdge(backendJob.id, 'REQUIRES', go.id); - const fullstackJob = db.createNode('Job', { + const fullstackJob = await db.createNode('Job', { title: 'Fullstack Engineer', status: 'active' }); - db.createEdge(fullstackJob.id, 'REQUIRES', ts.id); - db.createEdge(fullstackJob.id, 'REQUIRES', react.id); - db.createEdge(fullstackJob.id, 'REQUIRES', python.id); + await db.createEdge(fullstackJob.id, 'REQUIRES', ts.id); + await db.createEdge(fullstackJob.id, 'REQUIRES', react.id); + await 
db.createEdge(fullstackJob.id, 'REQUIRES', python.id); // Find jobs requiring TypeScript - get all active jobs first, then filter by skills - const allActiveJobs = db.nodes('Job') + const allActiveJobs = await db.nodes('Job') .where({ status: 'active' }) .exec(); - const tsJobs = allActiveJobs.filter(job => { - const skills = db.traverse(job.id).out('REQUIRES').toArray(); - return skills.some(skill => skill.properties.name === 'TypeScript'); - }); + const tsJobMatches = await Promise.all(allActiveJobs.map(async job => { + const skills = await db.traverse(job.id).out('REQUIRES').toArray(); + return skills.some(skill => skill.properties.name === 'TypeScript'); + })); + const tsJobs = allActiveJobs.filter((_, i) => tsJobMatches[i]); @@ -264,27 +264,26 @@ describe('Job Application Pipeline - Integration Tests', () => { expect(titles).toContain('Fullstack Engineer'); }); - it('should calculate skill match percentage for jobs', () => { + it('should calculate skill match percentage for jobs', async () => { // My skills const mySkills = ['TypeScript', 'React', 'Node.js', 'PostgreSQL']; - const skillNodes = mySkills.map(name => - db.createNode('Skill', { name, owned: true }) - ); + const skillNodes = await Promise.all(mySkills.map(async name => await db.createNode('Skill', { name, owned: true }) + )); // Job requirements - const job = db.createNode('Job', { + const job = await db.createNode('Job', { title: 'Full Stack Engineer', status: 'active' }); - db.createEdge(job.id, 'REQUIRES', skillNodes[0].id); // TypeScript - db.createEdge(job.id, 'REQUIRES', skillNodes[1].id); // React - db.createEdge(job.id, 'REQUIRES', skillNodes[2].id); // Node.js - db.createEdge(job.id, 'REQUIRES', db.createNode('Skill', { name: 'GraphQL' }).id); - db.createEdge(job.id, 'REQUIRES', db.createNode('Skill', { name: 'Docker' }).id); + await db.createEdge(job.id, 'REQUIRES', skillNodes[0].id); // TypeScript + await db.createEdge(job.id, 'REQUIRES', skillNodes[1].id); // React + await db.createEdge(job.id, 'REQUIRES', skillNodes[2].id); // Node.js + await db.createEdge(job.id, 'REQUIRES', (await db.createNode('Skill', { name: 'GraphQL' })).id); + await
db.createEdge(job.id, 'REQUIRES', (await db.createNode('Skill', { name: 'Docker' })).id); // Calculate match percentage - const requiredSkills = db.traverse(job.id).out('REQUIRES').toArray(); + const requiredSkills = await db.traverse(job.id).out('REQUIRES').toArray(); const requiredSkillNames = requiredSkills.map(s => s.properties.name); const matchedSkills = mySkills.filter(skill => requiredSkillNames.includes(skill)); const matchPercentage = (matchedSkills.length / requiredSkills.length) * 100; @@ -294,9 +293,9 @@ describe('Job Application Pipeline - Integration Tests', () => { }); }); - describe('Company and Network Analysis', () => { - it('should analyze company job posting patterns', () => { - const company = db.createNode('Company', { + describe('Company and Network Analysis', () => { + it('should analyze company job posting patterns', async () => { + const company = await db.createNode('Company', { name: 'BigTech Inc', size: 'large', industry: 'Technology' @@ -311,20 +310,19 @@ describe('Job Application Pipeline - Integration Tests', () => { 'Senior Backend Engineer' ]; - const jobs = jobTitles.map(title => - db.createNode('Job', { + const jobs = await Promise.all(jobTitles.map(async title => await db.createNode('Job', { title, status: 'active', postedAt: new Date().toISOString() }) - ); + )); - jobs.forEach(job => { - db.createEdge(job.id, 'POSTED_BY', company.id); - }); + await Promise.all(jobs.map(async job => { + await db.createEdge(job.id, 'POSTED_BY', company.id); + })); // Analyze company's job postings - const companyJobs = db.traverse(company.id) + const companyJobs = await db.traverse(company.id) .in('POSTED_BY') .toArray(); @@ -338,32 +336,32 @@ describe('Job Application Pipeline - Integration Tests', () => { expect(seniorCount).toBe(2); }); - it('should find similar jobs through skill overlap', () => { + it('should find similar jobs through skill overlap', async () => { // Create skills - const ts = db.createNode('Skill', { name:
'TypeScript' }); - const react = db.createNode('Skill', { name: 'React' }); - const node = db.createNode('Skill', { name: 'Node.js' }); - const python = db.createNode('Skill', { name: 'Python' }); + const ts = await db.createNode('Skill', { name: 'TypeScript' }); + const react = await db.createNode('Skill', { name: 'React' }); + const node = await db.createNode('Skill', { name: 'Node.js' }); + const python = await db.createNode('Skill', { name: 'Python' }); // Create jobs with overlapping skills - const job1 = db.createNode('Job', { title: 'Job 1', status: 'active' }); - db.createEdge(job1.id, 'REQUIRES', ts.id); - db.createEdge(job1.id, 'REQUIRES', react.id); - db.createEdge(job1.id, 'REQUIRES', node.id); + const job1 = await db.createNode('Job', { title: 'Job 1', status: 'active' }); + await db.createEdge(job1.id, 'REQUIRES', ts.id); + await db.createEdge(job1.id, 'REQUIRES', react.id); + await db.createEdge(job1.id, 'REQUIRES', node.id); - const job2 = db.createNode('Job', { title: 'Job 2', status: 'active' }); - db.createEdge(job2.id, 'REQUIRES', ts.id); - db.createEdge(job2.id, 'REQUIRES', react.id); - db.createEdge(job2.id, 'REQUIRES', python.id); + const job2 = await db.createNode('Job', { title: 'Job 2', status: 'active' }); + await db.createEdge(job2.id, 'REQUIRES', ts.id); + await db.createEdge(job2.id, 'REQUIRES', react.id); + await db.createEdge(job2.id, 'REQUIRES', python.id); - const job3 = db.createNode('Job', { title: 'Job 3', status: 'active' }); - db.createEdge(job3.id, 'REQUIRES', python.id); + const job3 = await db.createNode('Job', { title: 'Job 3', status: 'active' }); + await db.createEdge(job3.id, 'REQUIRES', python.id); // Add explicit similarity relationship - db.createEdge(job1.id, 'SIMILAR_TO', job2.id, { reason: 'skill_overlap', similarity: 0.8 }); + await db.createEdge(job1.id, 'SIMILAR_TO', job2.id, { reason: 'skill_overlap', similarity: 0.8 }); // Find similar jobs - const similarJobs = db.traverse(job1.id) + const similarJobs = 
await db.traverse(job1.id) .out('SIMILAR_TO') .toArray(); @@ -371,8 +369,8 @@ describe('Job Application Pipeline - Integration Tests', () => { expect(similarJobs[0].properties.title).toBe('Job 2'); // Calculate skill overlap programmatically - const job1Skills = db.traverse(job1.id).out('REQUIRES').toArray(); - const job2Skills = db.traverse(job2.id).out('REQUIRES').toArray(); + const job1Skills = await db.traverse(job1.id).out('REQUIRES').toArray(); + const job2Skills = await db.traverse(job2.id).out('REQUIRES').toArray(); const job1SkillNames = new Set(job1Skills.map(s => s.properties.name)); const job2SkillNames = new Set(job2Skills.map(s => s.properties.name)); @@ -382,19 +380,19 @@ describe('Job Application Pipeline - Integration Tests', () => { }); }); - describe('Interview and Offer Management', () => { - it('should track interview pipeline with multiple rounds', () => { - const company = db.createNode('Company', { name: 'InterviewCorp' }); - const job = db.createNode('Job', { + describe('Interview and Offer Management', async () => { + it('should track interview pipeline with multiple rounds', async () => { + const company = await db.createNode('Company', { name: 'InterviewCorp' }); + const job = await db.createNode('Job', { title: 'Senior Engineer', status: 'applied', appliedAt: new Date('2025-01-01').toISOString() }); - db.createEdge(job.id, 'POSTED_BY', company.id); + await db.createEdge(job.id, 'POSTED_BY', company.id); // Create interview rounds as separate nodes - const screening = db.createNode('Interview', { + const screening = await db.createNode('Interview', { round: 'screening', date: new Date('2025-01-15').toISOString(), duration: 30, @@ -402,7 +400,7 @@ describe('Job Application Pipeline - Integration Tests', () => { outcome: 'passed' }); - const technical = db.createNode('Interview', { + const technical = await db.createNode('Interview', { round: 'technical', date: new Date('2025-01-22').toISOString(), duration: 60, @@ -410,7 +408,7 @@ 
describe('Job Application Pipeline - Integration Tests', () => { outcome: 'passed' }); - const behavioral = db.createNode('Interview', { + const behavioral = await db.createNode('Interview', { round: 'behavioral', date: new Date('2025-01-29').toISOString(), duration: 45, @@ -418,7 +416,7 @@ describe('Job Application Pipeline - Integration Tests', () => { outcome: 'passed' }); - const onsite = db.createNode('Interview', { + const onsite = await db.createNode('Interview', { round: 'onsite', date: new Date('2025-02-05').toISOString(), duration: 240, @@ -427,16 +425,16 @@ describe('Job Application Pipeline - Integration Tests', () => { }); // Link interviews to job - db.createEdge(job.id, 'HAS_INTERVIEW', screening.id, { sequence: 1 }); - db.createEdge(job.id, 'HAS_INTERVIEW', technical.id, { sequence: 2 }); - db.createEdge(job.id, 'HAS_INTERVIEW', behavioral.id, { sequence: 3 }); - db.createEdge(job.id, 'HAS_INTERVIEW', onsite.id, { sequence: 4 }); + await db.createEdge(job.id, 'HAS_INTERVIEW', screening.id, { sequence: 1 }); + await db.createEdge(job.id, 'HAS_INTERVIEW', technical.id, { sequence: 2 }); + await db.createEdge(job.id, 'HAS_INTERVIEW', behavioral.id, { sequence: 3 }); + await db.createEdge(job.id, 'HAS_INTERVIEW', onsite.id, { sequence: 4 }); // Update job status - db.updateNode(job.id, { status: 'interviewing', currentRound: 'onsite' }); + await db.updateNode(job.id, { status: 'interviewing', currentRound: 'onsite' }); // Query all interviews for this job - const interviews = db.traverse(job.id) + const interviews = await db.traverse(job.id) .out('HAS_INTERVIEW') .toArray(); @@ -451,17 +449,17 @@ describe('Job Application Pipeline - Integration Tests', () => { expect(totalMinutes).toBe(375); }); - it('should manage offer negotiation workflow', () => { - const company = db.createNode('Company', { name: 'OfferCorp' }); - const job = db.createNode('Job', { + it('should manage offer negotiation workflow', async () => { + const company = await 
db.createNode('Company', { name: 'OfferCorp' }); + const job = await db.createNode('Job', { title: 'Principal Engineer', status: 'interviewing' }); - db.createEdge(job.id, 'POSTED_BY', company.id); + await db.createEdge(job.id, 'POSTED_BY', company.id); // Create offer node - const offer = db.createNode('Offer', { + const offer = await db.createNode('Offer', { baseSalary: 200000, bonus: 50000, equity: { type: 'RSU', amount: 100000, vestingYears: 4 }, @@ -470,32 +468,32 @@ describe('Job Application Pipeline - Integration Tests', () => { deadline: new Date('2025-02-20').toISOString() }); - db.createEdge(job.id, 'RECEIVED_OFFER', offer.id); + await db.createEdge(job.id, 'RECEIVED_OFFER', offer.id); // Update job status - db.updateNode(job.id, { + await db.updateNode(job.id, { status: 'offered', offeredAt: new Date().toISOString() }); // Create counter-offer - const counter = db.createNode('CounterOffer', { + const counter = await db.createNode('CounterOffer', { baseSalary: 220000, bonus: 60000, equity: { type: 'RSU', amount: 120000, vestingYears: 4 }, reasoning: 'Market rate for Principal Engineer with 10+ years experience' }); - db.createEdge(offer.id, 'COUNTERED_WITH', counter.id); + await db.createEdge(offer.id, 'COUNTERED_WITH', counter.id); // Verify offer chain - const offers = db.traverse(job.id) + const offers = await db.traverse(job.id) .out('RECEIVED_OFFER') .toArray(); expect(offers).toHaveLength(1); - const counterOffers = db.traverse(offers[0].id) + const counterOffers = await db.traverse(offers[0].id) .out('COUNTERED_WITH') .toArray(); @@ -504,21 +502,21 @@ describe('Job Application Pipeline - Integration Tests', () => { }); }); - describe('Data Integrity and Consistency', () => { - it('should maintain referential integrity when deleting jobs', () => { - const company = db.createNode('Company', { name: 'TestCorp' }); - const job = db.createNode('Job', { title: 'Test Job' }); - const skill = db.createNode('Skill', { name: 'Testing' }); + describe('Data 
Integrity and Consistency', () => { + it('should maintain referential integrity when deleting jobs', async () => { + const company = await db.createNode('Company', { name: 'TestCorp' }); + const job = await db.createNode('Job', { title: 'Test Job' }); + const skill = await db.createNode('Skill', { name: 'Testing' }); - db.createEdge(job.id, 'POSTED_BY', company.id); - db.createEdge(job.id, 'REQUIRES', skill.id); + await db.createEdge(job.id, 'POSTED_BY', company.id); + await db.createEdge(job.id, 'REQUIRES', skill.id); // Delete job (edges should be automatically deleted via CASCADE) - const deleted = db.deleteNode(job.id); + const deleted = await db.deleteNode(job.id); expect(deleted).toBe(true); // Verify job is gone - const retrievedJob = db.getNode(job.id); + const retrievedJob = await db.getNode(job.id); expect(retrievedJob).toBeNull(); // Verify company and skill still exist @@ -526,52 +524,51 @@ describe('Job Application Pipeline - Integration Tests', () => { - expect(db.getNode(skill.id)).not.toBeNull(); + expect(await db.getNode(skill.id)).not.toBeNull(); // Verify edges are gone - const companyJobs = db.traverse(company.id).in('POSTED_BY').toArray(); + const companyJobs = await db.traverse(company.id).in('POSTED_BY').toArray(); expect(companyJobs).toHaveLength(0); }); - it('should handle concurrent status updates correctly', () => { - const job = db.createNode('Job', { + it('should handle concurrent status updates correctly', async () => { + const job = await db.createNode('Job', { title: 'Concurrent Test', status: 'discovered' }); // Simulate rapid status updates - db.transaction(() => { - db.updateNode(job.id, { status: 'interested' }); - db.updateNode(job.id, { status: 'applied' }); - db.updateNode(job.id, { status: 'interviewing' }); + await db.transaction(async () => { + await db.updateNode(job.id, { status: 'interested' }); + await db.updateNode(job.id, { status: 'applied' }); + await db.updateNode(job.id, { status: 'interviewing' }); }); - const finalJob = db.getNode(job.id); + const finalJob =
await db.getNode(job.id); expect(finalJob?.properties.status).toBe('interviewing'); }); - it('should validate complex relationship constraints', () => { - const company1 = db.createNode('Company', { name: 'Company 1' }); - const company2 = db.createNode('Company', { name: 'Company 2' }); - const job = db.createNode('Job', { title: 'Multi-Company Job' }); + it('should validate complex relationship constraints', async () => { + const company1 = await db.createNode('Company', { name: 'Company 1' }); + const company2 = await db.createNode('Company', { name: 'Company 2' }); + const job = await db.createNode('Job', { title: 'Multi-Company Job' }); // Job can only be posted by one company (business logic, not enforced by DB) - db.createEdge(job.id, 'POSTED_BY', company1.id); + await db.createEdge(job.id, 'POSTED_BY', company1.id); // This would violate business logic - should be prevented by application layer // For this test, we just verify the database allows it but application should prevent - db.createEdge(job.id, 'POSTED_BY', company2.id); + await db.createEdge(job.id, 'POSTED_BY', company2.id); - const companies = db.traverse(job.id).out('POSTED_BY').toArray(); + const companies = await db.traverse(job.id).out('POSTED_BY').toArray(); // Database allows multiple edges, but application should enforce single company expect(companies.length).toBeGreaterThan(0); }); }); - describe('Performance with Realistic Data Volumes', () => { - it('should handle 100+ jobs efficiently', () => { + describe('Performance with Realistic Data Volumes', async () => { + it('should handle 100+ jobs efficiently', async () => { const startTime = Date.now(); // Create companies - const companies = Array.from({ length: 20 }, (_, i) => - db.createNode('Company', { + const companies = await Promise.all(Array.from({ length: 20 }, async (_, i) => await db.createNode('Company', { name: `Company ${i}`, size: i % 3 === 0 ? 'large' : i % 3 === 1 ? 
'medium' : 'small' }) @@ -579,12 +576,11 @@ describe('Job Application Pipeline - Integration Tests', () => { // Create skills - const skills = ['TypeScript', 'React', 'Node.js', 'Python', 'Go', 'Rust', 'Java', 'C++'].map( - name => db.createNode('Skill', { name }) - ); + const skills = await Promise.all(['TypeScript', 'React', 'Node.js', 'Python', 'Go', 'Rust', 'Java', 'C++'].map( + async name => db.createNode('Skill', { name }) + )); // Create 100 jobs - const jobs = Array.from({ length: 100 }, (_, i) => - db.createNode('Job', { + const jobs = await Promise.all(Array.from({ length: 100 }, async (_, i) => await db.createNode('Job', { title: `Job ${i}`, status: ['discovered', 'interested', 'applied', 'interviewing'][i % 4], salary: { min: 100000 + i * 1000, max: 150000 + i * 1000 }, @@ -593,26 +589,26 @@ describe('Job Application Pipeline - Integration Tests', () => { ); // Create relationships - jobs.forEach((job, i) => { + await Promise.all(jobs.map(async (job, i) => { const company = companies[i % companies.length]; - db.createEdge(job.id, 'POSTED_BY', company.id); + await db.createEdge(job.id, 'POSTED_BY', company.id); // Add 2-4 skills per job const skillCount = 2 + (i % 3); for (let j = 0; j < skillCount; j++) { const skill = skills[(i + j) % skills.length]; - db.createEdge(job.id, 'REQUIRES', skill.id); + await db.createEdge(job.id, 'REQUIRES', skill.id); } - }); + })); const setupTime = Date.now() - startTime; // Query performance const queryStart = Date.now(); - const activeJobs = db.nodes('Job') + const activeJobs = await db.nodes('Job') .where({ status: 'active' }) .exec(); - const allJobs = db.nodes('Job').exec(); + const allJobs = await db.nodes('Job').exec(); const remoteJobs = allJobs.filter(j => j.properties.remote === true); const highPayJobs = allJobs.filter(node => { const salary = node.properties.salary; @@ -622,9 +618,9 @@ describe('Job Application Pipeline - Integration Tests', () => { // Traversal performance const traversalStart = Date.now(); - jobs.slice(0, 10).forEach(job => { - db.traverse(job.id).out('POSTED_BY').toArray(); -
db.traverse(job.id).out('REQUIRES').toArray(); + await Promise.all(jobs.slice(0, 10).map(async job => { + await db.traverse(job.id).out('POSTED_BY').toArray(); + await db.traverse(job.id).out('REQUIRES').toArray(); - }); + })); const traversalTime = Date.now() - traversalStart; diff --git a/tests/unit/Database-merge.test.ts b/tests/unit/Database-merge.test.ts index a983f8c..d251c1f 100644 --- a/tests/unit/Database-merge.test.ts +++ b/tests/unit/Database-merge.test.ts @@ -1,21 +1,21 @@ import { GraphDatabase } from '../../src/core/Database'; import { MergeConflictError } from '../../src/types/merge'; -describe('GraphDatabase - Merge Operations', () => { +describe('GraphDatabase - Merge Operations', () => { let db: GraphDatabase; beforeEach(() => { db = new GraphDatabase(':memory:'); }); - afterEach(() => { - db.close(); + afterEach(async () => { + await db.close(); }); - describe('mergeNode()', () => { - describe('Node creation', () => { - it('should create new node when no match found', () => { - const result = db.mergeNode( + describe('mergeNode()', () => { + describe('Node creation', () => { + it('should create new node when no match found', async () => { + const result = await db.mergeNode( 'Job', { url: 'https://example.com/job/1' }, { title: 'Engineer', status: 'active', url: 'https://example.com/job/1' } @@ -28,8 +28,8 @@ describe('GraphDatabase - Merge Operations', () => { expect(result.node.properties.status).toBe('active'); }); - it('should merge matchProperties with baseProperties on create', () => { - const result = db.mergeNode( + it('should merge matchProperties with baseProperties on create', async () => { + const result = await db.mergeNode( 'Company', { name: 'TechCorp' }, { name: 'TechCorp', industry: 'SaaS', size: 'Large' } @@ -43,8 +43,8 @@ describe('GraphDatabase - Merge Operations', () => { }); }); - it('should apply onCreate properties when creating', () => { - const result = db.mergeNode( + it('should apply onCreate properties when creating',
async () => { + const result = await db.mergeNode( 'Job', { url: 'https://example.com/job/1' }, { url: 'https://example.com/job/1', title: 'Engineer' }, @@ -56,8 +56,8 @@ describe('GraphDatabase - Merge Operations', () => { expect((result.node.properties as any).status).toBe('new'); }); - it('should not apply onMatch properties when creating', () => { - const result = db.mergeNode( + it('should not apply onMatch properties when creating', async () => { + const result = await db.mergeNode( 'Job', { url: 'https://example.com/job/1' }, { url: 'https://example.com/job/1', title: 'Engineer' }, @@ -69,15 +69,15 @@ describe('GraphDatabase - Merge Operations', () => { }); }); - describe('Node matching and update', () => { - it('should find existing node by match criteria', () => { - const created = db.createNode('Job', { + describe('Node matching and update', async () => { + it('should find existing node by match criteria', async () => { + const created = await db.createNode('Job', { url: 'https://example.com/job/1', title: 'Engineer', status: 'active' }); - const result = db.mergeNode( + const result = await db.mergeNode( 'Job', { url: 'https://example.com/job/1' } ); @@ -86,14 +86,14 @@ describe('GraphDatabase - Merge Operations', () => { expect(result.node.id).toBe(created.id); }); - it('should apply onMatch properties on existing node', () => { - db.createNode('Job', { + it('should apply onMatch properties on existing node', async () => { + await db.createNode('Job', { url: 'https://example.com/job/1', title: 'Engineer', status: 'active' }); - const result = db.mergeNode( + const result = await db.mergeNode( 'Job', { url: 'https://example.com/job/1' }, undefined, @@ -108,13 +108,13 @@ describe('GraphDatabase - Merge Operations', () => { expect(result.node.properties.url).toBe('https://example.com/job/1'); }); - it('should not apply onCreate properties when matching', () => { - db.createNode('Job', { + it('should not apply onCreate properties when matching', async () => 
{ + await db.createNode('Job', { url: 'https://example.com/job/1', title: 'Engineer' }); - const result = db.mergeNode( + const result = await db.mergeNode( 'Job', { url: 'https://example.com/job/1' }, undefined, @@ -125,14 +125,14 @@ describe('GraphDatabase - Merge Operations', () => { expect(result.node.properties.discovered).toBeUndefined(); }); - it('should match with multiple criteria (AND logic)', () => { - db.createNode('Job', { + it('should match with multiple criteria (AND logic)', async () => { + await db.createNode('Job', { url: 'https://example.com/job/1', company: 'TechCorp', title: 'Engineer' }); - const result = db.mergeNode( + const result = await db.mergeNode( 'Job', { url: 'https://example.com/job/1', company: 'TechCorp' } ); @@ -140,14 +140,14 @@ describe('GraphDatabase - Merge Operations', () => { expect(result.created).toBe(false); }); - it('should create new node if any match criterion differs', () => { - db.createNode('Job', { + it('should create new node if any match criterion differs', async () => { + await db.createNode('Job', { url: 'https://example.com/job/1', company: 'TechCorp' }); // Different company - should create new node - const result = db.mergeNode( + const result = await db.mergeNode( 'Job', { url: 'https://example.com/job/1', company: 'Other Corp' }, { url: 'https://example.com/job/1', company: 'Other Corp', title: 'Job' } @@ -157,26 +157,24 @@ describe('GraphDatabase - Merge Operations', () => { }); }); - describe('Conflict detection', () => { - it('should throw error when multiple nodes match', () => { - db.createNode('Company', { industry: 'SaaS', name: 'Corp1' }); - db.createNode('Company', { industry: 'SaaS', name: 'Corp2' }); + describe('Conflict detection', async () => { + it('should throw error when multiple nodes match', async () => { + await db.createNode('Company', { industry: 'SaaS', name: 'Corp1' }); + await db.createNode('Company', { industry: 'SaaS', name: 'Corp2' }); - expect(() => { - db.mergeNode( + await 
expect(db.mergeNode( 'Company', { industry: 'SaaS' }, { industry: 'SaaS', size: 'Large' } - ); - }).toThrow(MergeConflictError); + )).rejects.toThrow(MergeConflictError); }); - it('should include conflict details in error', () => { - db.createNode('Job', { status: 'active', title: 'Job1' }); - db.createNode('Job', { status: 'active', title: 'Job2' }); + it('should include conflict details in error', async () => { + await db.createNode('Job', { status: 'active', title: 'Job1' }); + await db.createNode('Job', { status: 'active', title: 'Job2' }); try { - db.mergeNode('Job', { status: 'active' }, { status: 'active' }); + await db.mergeNode('Job', { status: 'active' }, { status: 'active' }); fail('Should have thrown MergeConflictError'); } catch (error) { expect(error).toBeInstanceOf(MergeConflictError); @@ -186,11 +184,11 @@ describe('GraphDatabase - Merge Operations', () => { } }); - it('should not throw when only one node matches', () => { - db.createNode('Company', { name: 'TechCorp', industry: 'SaaS' }); + it('should not throw when only one node matches', async () => { + await db.createNode('Company', { name: 'TechCorp', industry: 'SaaS' }); - expect(() => { - db.mergeNode( + expect(async () => { + await db.mergeNode( 'Company', { name: 'TechCorp' }, { name: 'TechCorp', size: 'Large' } @@ -199,21 +197,17 @@ describe('GraphDatabase - Merge Operations', () => { }); }); - describe('Edge cases', () => { - it('should throw on invalid node type', () => { - expect(() => { - db.mergeNode('', { name: 'Test' }, { name: 'Test' }); - }).toThrow(); + describe('Edge cases', async () => { + it('should throw on invalid node type', async () => { + await expect(db.mergeNode('', { name: 'Test' }, { name: 'Test' })).rejects.toThrow(); }); - it('should throw on empty matchProperties', () => { - expect(() => { - db.mergeNode('Job', {}, { title: 'Engineer' }); - }).toThrow(/Match properties cannot be empty/); + it('should throw on empty matchProperties', async () => { + await 
expect(db.mergeNode('Job', {}, { title: 'Engineer' })).rejects.toThrow(/Match properties cannot be empty/); }); - it('should handle nested properties', () => { - const result = db.mergeNode( + it('should handle nested properties', async () => { + const result = await db.mergeNode( 'Job', { url: 'https://example.com/job/1' }, { @@ -234,18 +228,18 @@ describe('GraphDatabase - Merge Operations', () => { }); }); - describe('mergeEdge()', () => { + describe('mergeEdge()', async () => { let jobId: number; let companyId: number; - beforeEach(() => { - jobId = db.createNode('Job', { title: 'Engineer' }).id; - companyId = db.createNode('Company', { name: 'TechCorp' }).id; + beforeEach(async () => { + jobId = (await db.createNode('Job', { title: 'Engineer' })).id; + companyId = (await db.createNode('Company', { name: 'TechCorp' })).id; }); - describe('Edge creation', () => { - it('should create new edge when none exists', () => { - const result = db.mergeEdge( + describe('Edge creation', async () => { + it('should create new edge when none exists', async () => { + const result = await db.mergeEdge( jobId, 'POSTED_BY', companyId, @@ -259,15 +253,15 @@ describe('GraphDatabase - Merge Operations', () => { expect(result.edge.properties).toEqual({ posted_date: '2024-01-01' }); }); - it('should create edge without properties', () => { - const result = db.mergeEdge(jobId, 'POSTED_BY', companyId); + it('should create edge without properties', async () => { + const result = await db.mergeEdge(jobId, 'POSTED_BY', companyId); expect(result.created).toBe(true); expect(result.edge.properties).toBeUndefined(); }); - it('should apply onCreate properties when creating', () => { - const result = db.mergeEdge( + it('should apply onCreate properties when creating', async () => { + const result = await db.mergeEdge( jobId, 'POSTED_BY', companyId, @@ -281,11 +275,11 @@ describe('GraphDatabase - Merge Operations', () => { }); }); - describe('Edge matching and update', () => { - it('should find 
existing edge and apply onMatch properties', () => { - db.createEdge(jobId, 'POSTED_BY', companyId, { status: 'draft' }); + describe('Edge matching and update', async () => { + it('should find existing edge and apply onMatch properties', async () => { + await db.createEdge(jobId, 'POSTED_BY', companyId, { status: 'draft' }); - const result = db.mergeEdge( + const result = await db.mergeEdge( jobId, 'POSTED_BY', companyId, @@ -298,14 +292,14 @@ describe('GraphDatabase - Merge Operations', () => { expect((result.edge.properties as any)?.updated_date).toBe('2024-01-02'); }); - it('should merge properties not replace them', () => { - db.createEdge(jobId, 'POSTED_BY', companyId, { + it('should merge properties not replace them', async () => { + await db.createEdge(jobId, 'POSTED_BY', companyId, { status: 'draft', created: '2024-01-01', author: 'system' }); - const result = db.mergeEdge( + const result = await db.mergeEdge( jobId, 'POSTED_BY', companyId, @@ -320,10 +314,10 @@ describe('GraphDatabase - Merge Operations', () => { }); }); - it('should handle null existing properties', () => { - db.createEdge(jobId, 'POSTED_BY', companyId); + it('should handle null existing properties', async () => { + await db.createEdge(jobId, 'POSTED_BY', companyId); - const result = db.mergeEdge( + const result = await db.mergeEdge( jobId, 'POSTED_BY', companyId, @@ -337,111 +331,103 @@ describe('GraphDatabase - Merge Operations', () => { }); }); - describe('Conflict detection', () => { - it('should throw when multiple edges exist with same type', () => { - db.createEdge(jobId, 'SIMILAR_TO', companyId); - db.createEdge(jobId, 'SIMILAR_TO', companyId); + describe('Conflict detection', async () => { + it('should throw when multiple edges exist with same type', async () => { + await db.createEdge(jobId, 'SIMILAR_TO', companyId); + await db.createEdge(jobId, 'SIMILAR_TO', companyId); - expect(() => { - db.mergeEdge(jobId, 'SIMILAR_TO', companyId, { score: 0.9 }); - 
}).toThrow(MergeConflictError); + await expect(db.mergeEdge(jobId, 'SIMILAR_TO', companyId, { score: 0.9 })).rejects.toThrow(MergeConflictError); }); - it('should not throw when only one edge matches', () => { - db.createEdge(jobId, 'POSTED_BY', companyId); + it('should not throw when only one edge matches', async () => { + await db.createEdge(jobId, 'POSTED_BY', companyId); - expect(() => { - db.mergeEdge(jobId, 'POSTED_BY', companyId, { status: 'updated' }); + await db.mergeEdge(jobId, 'POSTED_BY', companyId, { status: 'updated' }); + expect(() => { }).not.toThrow(); }); }); - describe('Edge cases', () => { - it('should throw on invalid from node', () => { - expect(() => { - db.mergeEdge(999999, 'POSTED_BY', companyId); - }).toThrow(); + describe('Edge cases', () => { + it('should throw on invalid from node', async () => { + await expect(db.mergeEdge(999999, 'POSTED_BY', companyId)).rejects.toThrow(); }); - it('should throw on invalid to node', () => { - expect(() => { - db.mergeEdge(jobId, 'POSTED_BY', 999999); - }).toThrow(); + it('should throw on invalid to node', async () => { + await expect(db.mergeEdge(jobId, 'POSTED_BY', 999999)).rejects.toThrow(); }); - it('should throw on invalid edge type', () => { - expect(() => { - db.mergeEdge(jobId, '', companyId); - }).toThrow(); + it('should throw on invalid edge type', async () => { + await expect(db.mergeEdge(jobId, '', companyId)).rejects.toThrow(); }); }); }); - describe('Index Management', () => { - describe('createPropertyIndex()', () => { - it('should create single-property index', () => { - db.createPropertyIndex('Job', 'url'); + describe('Index Management', () => { + describe('createPropertyIndex()', () => { + it('should create single-property index', async () => { + await db.createPropertyIndex('Job', 'url'); - const indexes = db.listIndexes(); + const indexes = await db.listIndexes(); const created = indexes.find(idx => idx.name === 'idx_merge_Job_url');
expect(created).toBeDefined(); expect(created?.table).toBe('nodes'); }); - it('should create unique index when specified', () => { - db.createPropertyIndex('Job', 'url', true); + it('should create unique index when specified', async () => { + await db.createPropertyIndex('Job', 'url', true); - const indexes = db.listIndexes(); + const indexes = await db.listIndexes(); const created = indexes.find(idx => idx.name === 'idx_merge_Job_url'); expect(created).toBeDefined(); expect(created?.unique).toBe(true); }); - it('should be idempotent (no error on duplicate)', () => { - db.createPropertyIndex('Job', 'url'); + it('should be idempotent (no error on duplicate)', async () => { + await db.createPropertyIndex('Job', 'url'); - expect(() => { - db.createPropertyIndex('Job', 'url'); + await db.createPropertyIndex('Job', 'url'); + expect(() => { }).not.toThrow(); }); }); - describe('dropIndex()', () => { - it('should drop existing index', () => { - db.createPropertyIndex('Job', 'url'); - db.dropIndex('idx_merge_Job_url'); + describe('dropIndex()', () => { + it('should drop existing index', async () => { + await db.createPropertyIndex('Job', 'url'); + await db.dropIndex('idx_merge_Job_url'); - const indexes = db.listIndexes(); + const indexes = await db.listIndexes(); const found = indexes.find(idx => idx.name === 'idx_merge_Job_url'); expect(found).toBeUndefined(); }); - it('should be idempotent (no error if index does not exist)', () => { - expect(() => { - db.dropIndex('idx_nonexistent'); + it('should be idempotent (no error if index does not exist)', async () => { + await db.dropIndex('idx_nonexistent'); + expect(() => { }).not.toThrow(); }); }); - describe('listIndexes()', () => { - it('should list custom merge indexes', () => { - db.createPropertyIndex('Job', 'url'); - db.createPropertyIndex('Company', 'name'); + describe('listIndexes()', () => { + it('should list custom merge indexes', async () => { + await db.createPropertyIndex('Job', 'url'); +
await db.createPropertyIndex('Company', 'name'); - const indexes = db.listIndexes(); + const indexes = await db.listIndexes(); expect(indexes.length).toBeGreaterThanOrEqual(2); expect(indexes.some(idx => idx.name === 'idx_merge_Job_url')).toBe(true); expect(indexes.some(idx => idx.name === 'idx_merge_Company_name')).toBe(true); }); - it('should only return merge indexes', () => { - db.createPropertyIndex('Job', 'url'); + it('should only return merge indexes', async () => { + await db.createPropertyIndex('Job', 'url'); - const indexes = db.listIndexes(); + const indexes = await db.listIndexes(); // Should only include idx_merge_* indexes expect(indexes.every(idx => idx.name.startsWith('idx_merge_'))).toBe(true); diff --git a/tests/unit/Database.test.ts b/tests/unit/Database.test.ts index 0d91693..72fa48f 100644 --- a/tests/unit/Database.test.ts +++ b/tests/unit/Database.test.ts @@ -2,7 +2,7 @@ import { GraphDatabase } from '../../src/core/Database'; import { Node, Edge, GraphSchema, GraphExport } from '../../src/types'; import { TransactionAlreadyFinalizedError } from '../../src/core/Transaction'; -describe('GraphDatabase', () => { +describe('GraphDatabase', () => { let db: GraphDatabase; beforeEach(() => { @@ -10,18 +10,18 @@ describe('GraphDatabase', () => { db = new GraphDatabase(':memory:'); }); - afterEach(() => { - db.close(); + afterEach(async () => { + await db.close(); }); - describe('constructor', () => { - it('should create database with in-memory path', () => { + describe('constructor', () => { + it('should create database with in-memory path', async () => { const testDb = new GraphDatabase(':memory:'); expect(testDb).toBeDefined(); - testDb.close(); + await testDb.close(); }); - it('should create database with schema validation', () => { + it('should create database with schema validation', async () => { const schema: GraphSchema = { nodes: { Job: { properties: ['title', 'status'] }, @@ -34,22 +34,22 @@ describe('GraphDatabase', () => {
const testDb = new GraphDatabase(':memory:', { schema }); expect(testDb).toBeDefined(); - testDb.close(); + await testDb.close(); }); - it('should create database with custom options', () => { + it('should create database with custom options', async () => { const testDb = new GraphDatabase(':memory:', { timeout: 5000, verbose: console.log }); expect(testDb).toBeDefined(); - testDb.close(); + await testDb.close(); }); }); - describe('createNode', () => { - it('should create node with valid type and properties', () => { - const node = db.createNode('Job', { + describe('createNode', async () => { + it('should create node with valid type and properties', async () => { + const node = await db.createNode('Job', { title: 'Senior Engineer', status: 'active', salary: 150000 @@ -65,8 +65,8 @@ describe('GraphDatabase', () => { expect(node.updatedAt).toBeInstanceOf(Date); }); - it('should create node with nested object properties', () => { - const node = db.createNode('Job', { + it('should create node with nested object properties', async () => { + const node = await db.createNode('Job', { title: 'Engineer', salary: { min: 100000, max: 150000 }, location: { city: 'San Francisco', state: 'CA' } @@ -76,8 +76,8 @@ describe('GraphDatabase', () => { expect(node.properties.location).toEqual({ city: 'San Francisco', state: 'CA' }); }); - it('should create node with array properties', () => { - const node = db.createNode('Job', { + it('should create node with array properties', async () => { + const node = await db.createNode('Job', { title: 'Engineer', skills: ['JavaScript', 'TypeScript', 'React'], tags: [1, 2, 3] @@ -87,28 +87,28 @@ describe('GraphDatabase', () => { expect(node.properties.tags).toEqual([1, 2, 3]); }); - it('should create node with empty properties', () => { - const node = db.createNode('Job', {}); + it('should create node with empty properties', async () => { + const node = await db.createNode('Job', {}); expect(node.id).toBeGreaterThan(0); 
expect(node.properties).toEqual({}); }); - it('should create multiple nodes with auto-incrementing IDs', () => { - const node1 = db.createNode('Job', { title: 'Job 1' }); - const node2 = db.createNode('Job', { title: 'Job 2' }); - const node3 = db.createNode('Company', { name: 'Company 1' }); + it('should create multiple nodes with auto-incrementing IDs', async () => { + const node1 = await db.createNode('Job', { title: 'Job 1' }); + const node2 = await db.createNode('Job', { title: 'Job 2' }); + const node3 = await db.createNode('Company', { name: 'Company 1' }); expect(node2.id).toBe(node1.id + 1); expect(node3.id).toBe(node2.id + 1); }); - it('should throw error for invalid node type', () => { - expect(() => db.createNode('', { title: 'Test' })).toThrow('Node type must be a non-empty string'); + it('should throw error for invalid node type', async () => { + await expect(db.createNode('', { title: 'Test' })).rejects.toThrow('Node type must be a non-empty string'); // Note: Whitespace-only strings are considered valid by the current validation }); - it('should enforce schema validation when schema is defined', () => { + it('should enforce schema validation when schema is defined', async () => { const schema: GraphSchema = { nodes: { Job: { properties: ['title', 'status'] } @@ -119,20 +119,20 @@ describe('GraphDatabase', () => { const schemaDb = new GraphDatabase(':memory:', { schema }); // Valid node type - const validNode = schemaDb.createNode('Job', { title: 'Engineer' }); + const validNode = await schemaDb.createNode('Job', { title: 'Engineer' }); expect(validNode).toBeDefined(); // Invalid node type - expect(() => schemaDb.createNode('InvalidType', { name: 'Test' })).toThrow(); + await expect(schemaDb.createNode('InvalidType', { name: 'Test' })).rejects.toThrow(); - schemaDb.close(); + await schemaDb.close(); }); }); - describe('getNode', () => { - it('should retrieve existing node by ID', () => { - const created = db.createNode('Job', { title: 'Engineer', 
status: 'active' }); - const retrieved = db.getNode(created.id); + describe('getNode', async () => { + it('should retrieve existing node by ID', async () => { + const created = await db.createNode('Job', { title: 'Engineer', status: 'active' }); + const retrieved = await db.getNode(created.id); expect(retrieved).toBeDefined(); expect(retrieved?.id).toBe(created.id); @@ -141,19 +141,19 @@ describe('GraphDatabase', () => { expect(retrieved?.properties.status).toBe('active'); }); - it('should return null for non-existent node', () => { - const node = db.getNode(99999); + it('should return null for non-existent node', async () => { + const node = await db.getNode(99999); expect(node).toBeNull(); }); - it('should throw error for invalid node ID', () => { - expect(() => db.getNode(0)).toThrow(); - expect(() => db.getNode(-1)).toThrow(); - expect(() => db.getNode(1.5)).toThrow(); + it('should throw error for invalid node ID', async () => { + await expect(db.getNode(0)).rejects.toThrow(); + await expect(db.getNode(-1)).rejects.toThrow(); + await expect(db.getNode(1.5)).rejects.toThrow(); }); - it('should retrieve node with complex nested properties', () => { - const created = db.createNode('Job', { + it('should retrieve node with complex nested properties', async () => { + const created = await db.createNode('Job', { title: 'Engineer', metadata: { views: 100, @@ -162,7 +162,7 @@ describe('GraphDatabase', () => { } }); - const retrieved = db.getNode(created.id); + const retrieved = await db.getNode(created.id); expect(retrieved?.properties.metadata).toEqual({ views: 100, applicants: 50, @@ -171,10 +171,10 @@ describe('GraphDatabase', () => { }); }); - describe('updateNode', () => { - it('should update node properties', () => { - const created = db.createNode('Job', { title: 'Engineer', status: 'draft' }); - const updated = db.updateNode(created.id, { status: 'active', views: 100 }); + describe('updateNode', async () => { + it('should update node properties', async () => { + 
const created = await db.createNode('Job', { title: 'Engineer', status: 'draft' }); + const updated = await db.updateNode(created.id, { status: 'active', views: 100 }); expect(updated.id).toBe(created.id); expect(updated.properties.title).toBe('Engineer'); // Original property retained @@ -183,14 +183,14 @@ describe('GraphDatabase', () => { expect(updated.updatedAt.getTime()).toBeGreaterThanOrEqual(created.updatedAt.getTime()); }); - it('should merge properties instead of replacing', () => { - const created = db.createNode('Job', { + it('should merge properties instead of replacing', async () => { + const created = await db.createNode('Job', { title: 'Engineer', status: 'draft', salary: 100000 }); - const updated = db.updateNode(created.id, { status: 'active' }); + const updated = await db.updateNode(created.id, { status: 'active' }); expect(updated.properties).toEqual({ title: 'Engineer', @@ -199,57 +199,57 @@ describe('GraphDatabase', () => { }); }); - it('should update nested properties', () => { - const created = db.createNode('Job', { + it('should update nested properties', async () => { + const created = await db.createNode('Job', { title: 'Engineer', metadata: { views: 10 } }); - const updated = db.updateNode(created.id, { + const updated = await db.updateNode(created.id, { metadata: { views: 20, likes: 5 } }); expect(updated.properties.metadata).toEqual({ views: 20, likes: 5 }); }); - it('should throw error for non-existent node', () => { - expect(() => db.updateNode(99999, { status: 'active' })).toThrow('Node with ID 99999 not found'); + it('should throw error for non-existent node', async () => { + await expect(db.updateNode(99999, { status: 'active' })).rejects.toThrow('Node with ID 99999 not found'); }); - it('should throw error for invalid node ID', () => { - expect(() => db.updateNode(0, { status: 'active' })).toThrow(); - expect(() => db.updateNode(-1, { status: 'active' })).toThrow(); + it('should throw error for invalid node ID', async () => { + 
await expect(db.updateNode(0, { status: 'active' })).rejects.toThrow(); + await expect(db.updateNode(-1, { status: 'active' })).rejects.toThrow(); }); - it('should allow updating with empty properties object', () => { - const created = db.createNode('Job', { title: 'Engineer' }); - const updated = db.updateNode(created.id, {}); + it('should allow updating with empty properties object', async () => { + const created = await db.createNode('Job', { title: 'Engineer' }); + const updated = await db.updateNode(created.id, {}); expect(updated.properties).toEqual(created.properties); }); }); - describe('deleteNode', () => { - it('should delete existing node', () => { - const node = db.createNode('Job', { title: 'Engineer' }); - const deleted = db.deleteNode(node.id); + describe('deleteNode', async () => { + it('should delete existing node', async () => { + const node = await db.createNode('Job', { title: 'Engineer' }); + const deleted = await db.deleteNode(node.id); expect(deleted).toBe(true); expect(db.getNode(node.id)).toBeNull(); }); - it('should return false for non-existent node', () => { - const deleted = db.deleteNode(99999); + it('should return false for non-existent node', async () => { + const deleted = await db.deleteNode(99999); expect(deleted).toBe(false); }); - it('should delete node and cascade delete edges', () => { - const job = db.createNode('Job', { title: 'Engineer' }); - const company = db.createNode('Company', { name: 'TechCorp' }); - const edge = db.createEdge(job.id, 'POSTED_BY', company.id); + it('should delete node and cascade delete edges', async () => { + const job = await db.createNode('Job', { title: 'Engineer' }); + const company = await db.createNode('Company', { name: 'TechCorp' }); + const edge = await db.createEdge(job.id, 'POSTED_BY', company.id); // Delete the job node - db.deleteNode(job.id); + await db.deleteNode(job.id); // Verify node is deleted expect(db.getNode(job.id)).toBeNull(); @@ -258,33 +258,33 @@ describe('GraphDatabase', 
() => { expect(db.getEdge(edge.id)).toBeNull(); }); - it('should throw error for invalid node ID', () => { - expect(() => db.deleteNode(0)).toThrow(); - expect(() => db.deleteNode(-1)).toThrow(); + it('should throw error for invalid node ID', async () => { + await expect(db.deleteNode(0)).rejects.toThrow(); + await expect(db.deleteNode(-1)).rejects.toThrow(); }); - it('should handle deleting node multiple times', () => { - const node = db.createNode('Job', { title: 'Engineer' }); + it('should handle deleting node multiple times', async () => { + const node = await db.createNode('Job', { title: 'Engineer' }); - const firstDelete = db.deleteNode(node.id); + const firstDelete = await db.deleteNode(node.id); expect(firstDelete).toBe(true); - const secondDelete = db.deleteNode(node.id); + const secondDelete = await db.deleteNode(node.id); expect(secondDelete).toBe(false); }); }); - describe('createEdge', () => { + describe('createEdge', async () => { let jobNode: Node; let companyNode: Node; - beforeEach(() => { - jobNode = db.createNode('Job', { title: 'Engineer' }); - companyNode = db.createNode('Company', { name: 'TechCorp' }); + beforeEach(async () => { + jobNode = await db.createNode('Job', { title: 'Engineer' }); + companyNode = await db.createNode('Company', { name: 'TechCorp' }); }); - it('should create edge between two nodes', () => { - const edge = db.createEdge(jobNode.id, 'POSTED_BY', companyNode.id); + it('should create edge between two nodes', async () => { + const edge = await db.createEdge(jobNode.id, 'POSTED_BY', companyNode.id); expect(edge).toBeDefined(); expect(edge.id).toBeGreaterThan(0); @@ -294,8 +294,8 @@ describe('GraphDatabase', () => { expect(edge.createdAt).toBeInstanceOf(Date); }); - it('should create edge with properties', () => { - const edge = db.createEdge(jobNode.id, 'REQUIRES', companyNode.id, { + it('should create edge with properties', async () => { + const edge = await db.createEdge(jobNode.id, 'REQUIRES', companyNode.id, { level: 
'expert', required: true, years: 5 @@ -308,53 +308,53 @@ describe('GraphDatabase', () => { }); }); - it('should create edge without properties', () => { - const edge = db.createEdge(jobNode.id, 'POSTED_BY', companyNode.id); + it('should create edge without properties', async () => { + const edge = await db.createEdge(jobNode.id, 'POSTED_BY', companyNode.id); expect(edge.properties).toBeUndefined(); }); - it('should create multiple edges between same nodes with different types', () => { - const edge1 = db.createEdge(jobNode.id, 'POSTED_BY', companyNode.id); - const edge2 = db.createEdge(jobNode.id, 'VERIFIED_BY', companyNode.id); + it('should create multiple edges between same nodes with different types', async () => { + const edge1 = await db.createEdge(jobNode.id, 'POSTED_BY', companyNode.id); + const edge2 = await db.createEdge(jobNode.id, 'VERIFIED_BY', companyNode.id); expect(edge1.id).not.toBe(edge2.id); expect(edge1.type).toBe('POSTED_BY'); expect(edge2.type).toBe('VERIFIED_BY'); }); - it('should throw error for non-existent source node', () => { - expect(() => db.createEdge(99999, 'POSTED_BY', companyNode.id)).toThrow('Source node with ID 99999 not found'); + it('should throw error for non-existent source node', async () => { + await expect(db.createEdge(99999, 'POSTED_BY', companyNode.id)).rejects.toThrow('Source node with ID 99999 not found'); }); - it('should throw error for non-existent target node', () => { - expect(() => db.createEdge(jobNode.id, 'POSTED_BY', 99999)).toThrow('Target node with ID 99999 not found'); + it('should throw error for non-existent target node', async () => { + await expect(db.createEdge(jobNode.id, 'POSTED_BY', 99999)).rejects.toThrow('Target node with ID 99999 not found'); }); - it('should throw error for invalid edge type', () => { - expect(() => db.createEdge(jobNode.id, '', companyNode.id)).toThrow('Edge type must be a non-empty string'); + it('should throw error for invalid edge type', async () => { + await 
expect(db.createEdge(jobNode.id, '', companyNode.id)).rejects.toThrow('Edge type must be a non-empty string'); // Note: Whitespace-only strings are considered valid by the current validation }); - it('should throw error for invalid node IDs', () => { - expect(() => db.createEdge(0, 'POSTED_BY', companyNode.id)).toThrow(); - expect(() => db.createEdge(jobNode.id, 'POSTED_BY', -1)).toThrow(); + it('should throw error for invalid node IDs', async () => { + await expect(db.createEdge(0, 'POSTED_BY', companyNode.id)).rejects.toThrow(); + await expect(db.createEdge(jobNode.id, 'POSTED_BY', -1)).rejects.toThrow(); }); - it('should allow self-referencing edges', () => { - const edge = db.createEdge(jobNode.id, 'SIMILAR_TO', jobNode.id); + it('should allow self-referencing edges', async () => { + const edge = await db.createEdge(jobNode.id, 'SIMILAR_TO', jobNode.id); expect(edge.from).toBe(jobNode.id); expect(edge.to).toBe(jobNode.id); }); }); - describe('getEdge', () => { - it('should retrieve existing edge by ID', () => { - const job = db.createNode('Job', { title: 'Engineer' }); - const company = db.createNode('Company', { name: 'TechCorp' }); - const created = db.createEdge(job.id, 'POSTED_BY', company.id, { verified: true }); + describe('getEdge', async () => { + it('should retrieve existing edge by ID', async () => { + const job = await db.createNode('Job', { title: 'Engineer' }); + const company = await db.createNode('Company', { name: 'TechCorp' }); + const created = await db.createEdge(job.id, 'POSTED_BY', company.id, { verified: true }); - const retrieved = db.getEdge(created.id); + const retrieved = await db.getEdge(created.id); expect(retrieved).toBeDefined(); expect(retrieved?.id).toBe(created.id); @@ -364,57 +364,57 @@ describe('GraphDatabase', () => { expect(retrieved?.properties).toEqual({ verified: true }); }); - it('should return null for non-existent edge', () => { - const edge = db.getEdge(99999); + it('should return null for non-existent edge', async () 
=> { + const edge = await db.getEdge(99999); expect(edge).toBeNull(); }); - it('should throw error for invalid edge ID', () => { - expect(() => db.getEdge(0)).toThrow(); - expect(() => db.getEdge(-1)).toThrow(); + it('should throw error for invalid edge ID', async () => { + await expect(db.getEdge(0)).rejects.toThrow(); + await expect(db.getEdge(-1)).rejects.toThrow(); }); }); - describe('deleteEdge', () => { - it('should delete existing edge', () => { - const job = db.createNode('Job', { title: 'Engineer' }); - const company = db.createNode('Company', { name: 'TechCorp' }); - const edge = db.createEdge(job.id, 'POSTED_BY', company.id); + describe('deleteEdge', async () => { + it('should delete existing edge', async () => { + const job = await db.createNode('Job', { title: 'Engineer' }); + const company = await db.createNode('Company', { name: 'TechCorp' }); + const edge = await db.createEdge(job.id, 'POSTED_BY', company.id); - const deleted = db.deleteEdge(edge.id); + const deleted = await db.deleteEdge(edge.id); expect(deleted).toBe(true); expect(db.getEdge(edge.id)).toBeNull(); }); - it('should return false for non-existent edge', () => { - const deleted = db.deleteEdge(99999); + it('should return false for non-existent edge', async () => { + const deleted = await db.deleteEdge(99999); expect(deleted).toBe(false); }); - it('should not delete nodes when edge is deleted', () => { - const job = db.createNode('Job', { title: 'Engineer' }); - const company = db.createNode('Company', { name: 'TechCorp' }); - const edge = db.createEdge(job.id, 'POSTED_BY', company.id); + it('should not delete nodes when edge is deleted', async () => { + const job = await db.createNode('Job', { title: 'Engineer' }); + const company = await db.createNode('Company', { name: 'TechCorp' }); + const edge = await db.createEdge(job.id, 'POSTED_BY', company.id); - db.deleteEdge(edge.id); + await db.deleteEdge(edge.id); expect(db.getNode(job.id)).toBeDefined(); 
expect(db.getNode(company.id)).toBeDefined(); }); - it('should throw error for invalid edge ID', () => { - expect(() => db.deleteEdge(0)).toThrow(); - expect(() => db.deleteEdge(-1)).toThrow(); + it('should throw error for invalid edge ID', async () => { + await expect(db.deleteEdge(0)).rejects.toThrow(); + await expect(db.deleteEdge(-1)).rejects.toThrow(); }); }); - describe('transaction', () => { - it('should commit transaction on success', () => { - const result = db.transaction(() => { - const job = db.createNode('Job', { title: 'Engineer' }); - const company = db.createNode('Company', { name: 'TechCorp' }); - db.createEdge(job.id, 'POSTED_BY', company.id); + describe('transaction', async () => { + it('should commit transaction on success', async () => { + const result = await db.transaction(async () => { + const job = await db.createNode('Job', { title: 'Engineer' }); + const company = await db.createNode('Company', { name: 'TechCorp' }); + await db.createEdge(job.id, 'POSTED_BY', company.id); return { job, company }; }); @@ -424,94 +424,88 @@ describe('GraphDatabase', () => { expect(db.getNode(result.company.id)).toBeDefined(); }); - it('should rollback transaction on error', () => { - expect(() => { - db.transaction(() => { - db.createNode('Job', { title: 'Engineer' }); + it('should rollback transaction on error', async () => { + await expect(db.transaction(async () => { + await db.createNode('Job', { title: 'Engineer' }); throw new Error('Test error'); - }); - }).toThrow('Test error'); + })).rejects.toThrow('Test error'); // Verify rollback - no nodes should exist - const allNodes = db.nodes('Job').exec(); + const allNodes = await db.nodes('Job').exec(); expect(allNodes).toHaveLength(0); }); - it('should support manual commit', () => { - db.transaction((ctx) => { - db.createNode('Job', { title: 'Engineer' }); + it('should support manual commit', async () => { + await db.transaction(async (ctx) => { + await db.createNode('Job', { title: 'Engineer' }); 
ctx.commit(); }); - const nodes = db.nodes('Job').exec(); + const nodes = await db.nodes('Job').exec(); expect(nodes).toHaveLength(1); }); - it('should support manual rollback', () => { - db.transaction((ctx) => { - db.createNode('Job', { title: 'Engineer' }); + it('should support manual rollback', async () => { + await db.transaction(async (ctx) => { + await db.createNode('Job', { title: 'Engineer' }); ctx.rollback(); }); - const nodes = db.nodes('Job').exec(); + const nodes = await db.nodes('Job').exec(); expect(nodes).toHaveLength(0); }); - it('should support savepoints', () => { - db.transaction((ctx) => { - const job1 = db.createNode('Job', { title: 'Job 1' }); + it('should support savepoints', async () => { + await db.transaction(async (ctx) => { + const job1 = await db.createNode('Job', { title: 'Job 1' }); ctx.savepoint('sp1'); - const job2 = db.createNode('Job', { title: 'Job 2' }); + const job2 = await db.createNode('Job', { title: 'Job 2' }); ctx.rollbackTo('sp1'); - const job3 = db.createNode('Job', { title: 'Job 3' }); + const job3 = await db.createNode('Job', { title: 'Job 3' }); }); - const jobs = db.nodes('Job').exec(); + const jobs = await db.nodes('Job').exec(); expect(jobs).toHaveLength(2); expect(jobs.map(j => j.properties.title).sort()).toEqual(['Job 1', 'Job 3']); }); - it('should throw error when committing finalized transaction', () => { - expect(() => { - db.transaction((ctx) => { + it('should throw error when committing finalized transaction', async () => { + await expect(db.transaction(async (ctx) => { ctx.commit(); ctx.commit(); // Second commit should throw - }); - }).toThrow(TransactionAlreadyFinalizedError); + })).rejects.toThrow(TransactionAlreadyFinalizedError); }); - it('should throw error when rolling back finalized transaction', () => { - expect(() => { - db.transaction((ctx) => { + it('should throw error when rolling back finalized transaction', async () => { + await expect(db.transaction(async (ctx) => { ctx.commit(); 
ctx.rollback(); // Rollback after commit should throw - }); - }).toThrow(TransactionAlreadyFinalizedError); + })).rejects.toThrow(TransactionAlreadyFinalizedError); }); - it('should handle nested savepoints', () => { - db.transaction((ctx) => { - db.createNode('Job', { title: 'Job 1' }); + it('should handle nested savepoints', async () => { + await db.transaction(async (ctx) => { + await db.createNode('Job', { title: 'Job 1' }); ctx.savepoint('sp1'); - db.createNode('Job', { title: 'Job 2' }); + await db.createNode('Job', { title: 'Job 2' }); ctx.savepoint('sp2'); - db.createNode('Job', { title: 'Job 3' }); + await db.createNode('Job', { title: 'Job 3' }); ctx.rollbackTo('sp2'); - db.createNode('Job', { title: 'Job 4' }); + await db.createNode('Job', { title: 'Job 4' }); }); - const jobs = db.nodes('Job').exec(); + const jobs = await db.nodes('Job').exec(); expect(jobs).toHaveLength(3); expect(jobs.map(j => j.properties.title).sort()).toEqual(['Job 1', 'Job 2', 'Job 4']); }); - it('should return transaction function result', () => { - const result = db.transaction(() => { + it('should return transaction function result', async () => { + const result = await db.transaction(async () => { return { value: 42, message: 'success' }; }); @@ -519,9 +513,9 @@ describe('GraphDatabase', () => { }); }); - describe('export', () => { - it('should export empty database', () => { - const exported = db.export(); + describe('export', async () => { + it('should export empty database', async () => { + const exported = await db.export(); expect(exported.nodes).toEqual([]); expect(exported.edges).toEqual([]); @@ -530,75 +524,75 @@ describe('GraphDatabase', () => { expect(exported.metadata?.exportedAt).toBeDefined(); }); - it('should export nodes only', () => { - db.createNode('Job', { title: 'Engineer' }); - db.createNode('Company', { name: 'TechCorp' }); + it('should export nodes only', async () => { + await db.createNode('Job', { title: 'Engineer' }); + await db.createNode('Company', 
{ name: 'TechCorp' }); - const exported = db.export(); + const exported = await db.export(); expect(exported.nodes).toHaveLength(2); expect(exported.edges).toHaveLength(0); }); - it('should export nodes and edges', () => { - const job = db.createNode('Job', { title: 'Engineer' }); - const company = db.createNode('Company', { name: 'TechCorp' }); - db.createEdge(job.id, 'POSTED_BY', company.id); + it('should export nodes and edges', async () => { + const job = await db.createNode('Job', { title: 'Engineer' }); + const company = await db.createNode('Company', { name: 'TechCorp' }); + await db.createEdge(job.id, 'POSTED_BY', company.id); - const exported = db.export(); + const exported = await db.export(); expect(exported.nodes).toHaveLength(2); expect(exported.edges).toHaveLength(1); expect(exported.edges[0].type).toBe('POSTED_BY'); }); - it('should preserve all node properties in export', () => { - const created = db.createNode('Job', { + it('should preserve all node properties in export', async () => { + const created = await db.createNode('Job', { title: 'Engineer', nested: { value: 'test' }, array: [1, 2, 3] }); - const exported = db.export(); + const exported = await db.export(); expect(exported.nodes[0].properties).toEqual(created.properties); }); - it('should preserve all edge properties in export', () => { - const job = db.createNode('Job', { title: 'Engineer' }); - const company = db.createNode('Company', { name: 'TechCorp' }); - db.createEdge(job.id, 'POSTED_BY', company.id, { verified: true, rating: 5 }); + it('should preserve all edge properties in export', async () => { + const job = await db.createNode('Job', { title: 'Engineer' }); + const company = await db.createNode('Company', { name: 'TechCorp' }); + await db.createEdge(job.id, 'POSTED_BY', company.id, { verified: true, rating: 5 }); - const exported = db.export(); + const exported = await db.export(); expect(exported.edges[0].properties).toEqual({ verified: true, rating: 5 }); }); - it('should 
include timestamps in export', () => { - const node = db.createNode('Job', { title: 'Engineer' }); - const exported = db.export(); + it('should include timestamps in export', async () => { + const node = await db.createNode('Job', { title: 'Engineer' }); + const exported = await db.export(); expect(exported.nodes[0].createdAt).toBeInstanceOf(Date); expect(exported.nodes[0].updatedAt).toBeInstanceOf(Date); }); }); - describe('import', () => { - it('should import empty dataset', () => { + describe('import', async () => { + it('should import empty dataset', async () => { const data: GraphExport = { nodes: [], edges: [], metadata: { version: '1', exportedAt: new Date().toISOString() } }; - db.import(data); + await db.import(data); - const exported = db.export(); + const exported = await db.export(); expect(exported.nodes).toHaveLength(0); expect(exported.edges).toHaveLength(0); }); - it('should import nodes only', () => { + it('should import nodes only', async () => { const data: GraphExport = { nodes: [ { id: 1, type: 'Job', properties: { title: 'Engineer' }, createdAt: new Date(), updatedAt: new Date() }, @@ -608,14 +602,14 @@ describe('GraphDatabase', () => { metadata: { version: '1', exportedAt: new Date().toISOString() } }; - db.import(data); + await db.import(data); - const nodes = db.nodes('Job').exec(); + const nodes = await db.nodes('Job').exec(); expect(nodes).toHaveLength(1); expect(nodes[0].properties.title).toBe('Engineer'); }); - it('should import nodes and edges', () => { + it('should import nodes and edges', async () => { const data: GraphExport = { nodes: [ { id: 1, type: 'Job', properties: { title: 'Engineer' }, createdAt: new Date(), updatedAt: new Date() }, @@ -627,14 +621,14 @@ describe('GraphDatabase', () => { metadata: { version: '1', exportedAt: new Date().toISOString() } }; - db.import(data); + await db.import(data); - const exported = db.export(); + const exported = await db.export(); expect(exported.nodes).toHaveLength(2); 
expect(exported.edges).toHaveLength(1); }); - it('should handle import in transaction', () => { + it('should handle import in transaction', async () => { const data: GraphExport = { nodes: [ { id: 1, type: 'Job', properties: { title: 'Engineer' }, createdAt: new Date(), updatedAt: new Date() } @@ -643,14 +637,14 @@ describe('GraphDatabase', () => { metadata: { version: '1', exportedAt: new Date().toISOString() } }; - db.import(data); + await db.import(data); // Verify transaction completed - const nodes = db.nodes('Job').exec(); + const nodes = await db.nodes('Job').exec(); expect(nodes).toHaveLength(1); }); - it('should rollback import on error', () => { + it('should rollback import on error', async () => { const data: GraphExport = { nodes: [ { id: 1, type: 'Job', properties: { title: 'Engineer' }, createdAt: new Date(), updatedAt: new Date() } @@ -662,14 +656,14 @@ describe('GraphDatabase', () => { metadata: { version: '1', exportedAt: new Date().toISOString() } }; - expect(() => db.import(data)).toThrow(); + await expect(db.import(data)).rejects.toThrow(); // Verify rollback - no nodes should be imported - const nodes = db.nodes('Job').exec(); + const nodes = await db.nodes('Job').exec(); expect(nodes).toHaveLength(0); }); - it('should preserve complex properties on import', () => { + it('should preserve complex properties on import', async () => { const data: GraphExport = { nodes: [ { @@ -688,55 +682,55 @@ describe('GraphDatabase', () => { metadata: { version: '1', exportedAt: new Date().toISOString() } }; - db.import(data); + await db.import(data); - const nodes = db.nodes('Job').exec(); + const nodes = await db.nodes('Job').exec(); expect(nodes[0].properties.nested).toEqual({ deep: { value: 'test' } }); expect(nodes[0].properties.array).toEqual([1, 2, 3]); }); - it('should handle round-trip export/import', () => { + it('should handle round-trip export/import', async () => { // Create original data - const job = db.createNode('Job', { title: 'Engineer', 
status: 'active' }); - const company = db.createNode('Company', { name: 'TechCorp' }); - db.createEdge(job.id, 'POSTED_BY', company.id, { verified: true }); + const job = await db.createNode('Job', { title: 'Engineer', status: 'active' }); + const company = await db.createNode('Company', { name: 'TechCorp' }); + await db.createEdge(job.id, 'POSTED_BY', company.id, { verified: true }); // Export - const exported = db.export(); + const exported = await db.export(); // Create new database and import const newDb = new GraphDatabase(':memory:'); - newDb.import(exported); + await newDb.import(exported); // Verify - const newExported = newDb.export(); + const newExported = await newDb.export(); expect(newExported.nodes).toHaveLength(2); expect(newExported.edges).toHaveLength(1); expect(newExported.nodes.find(n => n.type === 'Job')?.properties.title).toBe('Engineer'); - newDb.close(); + await newDb.close(); }); }); - describe('nodes', () => { - it('should return NodeQuery instance', () => { + describe('nodes', async () => { + it('should return NodeQuery instance', async () => { const query = db.nodes('Job'); expect(query).toBeDefined(); expect(typeof query.exec).toBe('function'); }); - it('should execute basic query', () => { - db.createNode('Job', { title: 'Engineer' }); - db.createNode('Job', { title: 'Designer' }); + it('should execute basic query', async () => { + await db.createNode('Job', { title: 'Engineer' }); + await db.createNode('Job', { title: 'Designer' }); - const jobs = db.nodes('Job').exec(); + const jobs = await db.nodes('Job').exec(); expect(jobs).toHaveLength(2); }); }); - describe('traverse', () => { - it('should return TraversalQuery instance', () => { - const node = db.createNode('Job', { title: 'Engineer' }); + describe('traverse', async () => { + it('should return TraversalQuery instance', async () => { + const node = await db.createNode('Job', { title: 'Engineer' }); const query = db.traverse(node.id); expect(query).toBeDefined(); @@ -752,18 
+746,18 @@ describe('GraphDatabase', () => { }); }); - describe('close', () => { - it('should close database connection', () => { + describe('close', async () => { + it('should close database connection', async () => { const testDb = new GraphDatabase(':memory:'); - testDb.close(); + await testDb.close(); // After close, operations should fail - expect(() => testDb.createNode('Job', { title: 'Test' })).toThrow(); + await expect(testDb.createNode('Job', { title: 'Test' })).rejects.toThrow(); }); - it('should handle multiple close calls', () => { + it('should handle multiple close calls', async () => { const testDb = new GraphDatabase(':memory:'); - testDb.close(); + await testDb.close(); // Second close might throw, but shouldn't crash expect(() => testDb.close()).not.toThrow(); @@ -780,16 +774,16 @@ describe('GraphDatabase', () => { }); }); - describe('edge cases and error handling', () => { - it('should handle very long strings in properties', () => { + describe('edge cases and error handling', async () => { + it('should handle very long strings in properties', async () => { const longString = 'a'.repeat(10000); - const node = db.createNode('Job', { description: longString }); + const node = await db.createNode('Job', { description: longString }); expect(node.properties.description).toBe(longString); }); - it('should handle unicode characters in properties', () => { - const node = db.createNode('Job', { + it('should handle unicode characters in properties', async () => { + const node = await db.createNode('Job', { title: '工程师', emoji: '🚀💻🔥', special: 'Çüé' @@ -799,8 +793,8 @@ describe('GraphDatabase', () => { expect(node.properties.emoji).toBe('🚀💻🔥'); }); - it('should handle null and undefined in properties', () => { - const node = db.createNode('Job', { + it('should handle null and undefined in properties', async () => { + const node = await db.createNode('Job', { title: 'Engineer', optionalField: null, undefinedField: undefined @@ -810,8 +804,8 @@ 
describe('GraphDatabase', () => { expect(node.properties.undefinedField).toBeUndefined(); }); - it('should handle boolean values in properties', () => { - const node = db.createNode('Job', { + it('should handle boolean values in properties', async () => { + const node = await db.createNode('Job', { active: true, remote: false }); @@ -820,42 +814,42 @@ describe('GraphDatabase', () => { expect(node.properties.remote).toBe(false); }); - it('should handle date objects in properties', () => { + it('should handle date objects in properties', async () => { const date = new Date('2025-10-28T12:00:00Z'); - const node = db.createNode('Job', { postedDate: date }); + const node = await db.createNode('Job', { postedDate: date }); // Date objects are serialized to ISO strings via JSON.stringify expect(node.properties.postedDate).toBe(date.toISOString()); }); - it('should handle mixed type arrays', () => { - const node = db.createNode('Job', { + it('should handle mixed type arrays', async () => { + const node = await db.createNode('Job', { mixed: [1, 'string', true, null, { nested: 'object' }] }); expect(node.properties.mixed).toEqual([1, 'string', true, null, { nested: 'object' }]); }); - it('should handle large number of nodes', () => { + it('should handle large number of nodes', async () => { const count = 1000; for (let i = 0; i < count; i++) { - db.createNode('Job', { index: i }); + await db.createNode('Job', { index: i }); } - const nodes = db.nodes('Job').exec(); + const nodes = await db.nodes('Job').exec(); expect(nodes.length).toBe(count); }); - it('should handle large number of edges', () => { - const node1 = db.createNode('Hub', { name: 'Hub' }); + it('should handle large number of edges', async () => { + const node1 = await db.createNode('Hub', { name: 'Hub' }); const count = 500; for (let i = 0; i < count; i++) { - const node = db.createNode('Node', { index: i }); - db.createEdge(node1.id, 'CONNECTS', node.id); + const node = await db.createNode('Node', { index: i 
}); + await db.createEdge(node1.id, 'CONNECTS', node.id); } - const exported = db.export(); + const exported = await db.export(); expect(exported.edges.length).toBe(count); }); }); diff --git a/tests/unit/NodeQuery-both-direction.test.ts b/tests/unit/NodeQuery-both-direction.test.ts index af10133..dac2364 100644 --- a/tests/unit/NodeQuery-both-direction.test.ts +++ b/tests/unit/NodeQuery-both-direction.test.ts @@ -1,10 +1,10 @@ import { describe, it, expect, beforeEach, afterEach } from '@jest/globals'; import { GraphDatabase } from '../../src/core/Database'; -describe('NodeQuery - Both Direction Support', () => { +describe('NodeQuery - Both Direction Support', async () => { let db: GraphDatabase; - beforeEach(() => { + beforeEach(async () => { db = new GraphDatabase(':memory:'); // Create test graph: @@ -12,24 +12,24 @@ describe('NodeQuery - Both Direction Support', () => { // Bob -KNOWS-> Alice // Alice -KNOWS-> Charlie // David -KNOWS-> Alice - const alice = db.createNode('Person', { name: 'Alice' }); - const bob = db.createNode('Person', { name: 'Bob' }); - const charlie = db.createNode('Person', { name: 'Charlie' }); - const david = db.createNode('Person', { name: 'David' }); - - db.createEdge(alice.id, 'KNOWS', bob.id); - db.createEdge(bob.id, 'KNOWS', alice.id); - db.createEdge(alice.id, 'KNOWS', charlie.id); - db.createEdge(david.id, 'KNOWS', alice.id); + const alice = await db.createNode('Person', { name: 'Alice' }); + const bob = await db.createNode('Person', { name: 'Bob' }); + const charlie = await db.createNode('Person', { name: 'Charlie' }); + const david = await db.createNode('Person', { name: 'David' }); + + await db.createEdge(alice.id, 'KNOWS', bob.id); + await db.createEdge(bob.id, 'KNOWS', alice.id); + await db.createEdge(alice.id, 'KNOWS', charlie.id); + await db.createEdge(david.id, 'KNOWS', alice.id); }); - afterEach(() => { - db.close(); + afterEach(async () => { + await db.close(); }); - it('should find nodes connected in both directions', 
() => { + it('should find nodes connected in both directions', async () => { // Find all people Alice knows (both outgoing and incoming KNOWS edges) - const aliceConnections = db.nodes('Person') + const aliceConnections = await db.nodes('Person') .where({ name: 'Alice' }) .connectedTo('Person', 'KNOWS', 'both') .exec(); @@ -45,36 +45,36 @@ describe('NodeQuery - Both Direction Support', () => { // Let's test a clearer scenario }); - it('should find all people connected to Alice in either direction', () => { + it('should find all people connected to Alice in either direction', async () => { // We need to construct a query that finds people who have KNOWS edges // to/from Alice in either direction // First, get Alice's ID - const alice = db.nodes('Person').where({ name: 'Alice' }).first(); + const alice = await db.nodes('Person').where({ name: 'Alice' }).first(); expect(alice).toBeTruthy(); // This is actually testing a different pattern - we'd need to query // for edges and then get nodes. Let me reconsider the test approach. 
}); - it('should support both direction in connectedTo queries', () => { + it('should support both direction in connectedTo queries', async () => { // Create a clearer test case // Job1 -SIMILAR_TO-> Job2 // Job3 -SIMILAR_TO-> Job2 // Job2 -SIMILAR_TO-> Job4 - const job1 = db.createNode('Job', { title: 'Job 1' }); - const job2 = db.createNode('Job', { title: 'Job 2' }); - const job3 = db.createNode('Job', { title: 'Job 3' }); - const job4 = db.createNode('Job', { title: 'Job 4' }); + const job1 = await db.createNode('Job', { title: 'Job 1' }); + const job2 = await db.createNode('Job', { title: 'Job 2' }); + const job3 = await db.createNode('Job', { title: 'Job 3' }); + const job4 = await db.createNode('Job', { title: 'Job 4' }); - db.createEdge(job1.id, 'SIMILAR_TO', job2.id); - db.createEdge(job3.id, 'SIMILAR_TO', job2.id); - db.createEdge(job2.id, 'SIMILAR_TO', job4.id); + await db.createEdge(job1.id, 'SIMILAR_TO', job2.id); + await db.createEdge(job3.id, 'SIMILAR_TO', job2.id); + await db.createEdge(job2.id, 'SIMILAR_TO', job4.id); // Find jobs that have SIMILAR_TO connections in both directions to Job type nodes // Starting from Job2, it should find jobs connected in either direction - const results = db.nodes('Job') + const results = await db.nodes('Job') .where({ title: 'Job 2' }) .connectedTo('Job', 'SIMILAR_TO', 'both') .exec(); @@ -86,31 +86,31 @@ describe('NodeQuery - Both Direction Support', () => { // Actually this doesn't make sense as written. Let me fix the test. 
}); - it('should find companies that have bidirectional relationships with jobs', () => { + it('should find companies that have bidirectional relationships with jobs', async () => { // Better test: Find companies that are connected to active jobs in either direction - const company1 = db.createNode('Company', { name: 'Company 1' }); - const company2 = db.createNode('Company', { name: 'Company 2' }); - const company3 = db.createNode('Company', { name: 'Company 3' }); + const company1 = await db.createNode('Company', { name: 'Company 1' }); + const company2 = await db.createNode('Company', { name: 'Company 2' }); + const company3 = await db.createNode('Company', { name: 'Company 3' }); - const job1 = db.createNode('Job', { title: 'Job 1', status: 'active' }); - const job2 = db.createNode('Job', { title: 'Job 2', status: 'active' }); - const job3 = db.createNode('Job', { title: 'Job 3', status: 'inactive' }); + const job1 = await db.createNode('Job', { title: 'Job 1', status: 'active' }); + const job2 = await db.createNode('Job', { title: 'Job 2', status: 'active' }); + const job3 = await db.createNode('Job', { title: 'Job 3', status: 'inactive' }); // Company1 -> Job1 (POSTED_BY) - db.createEdge(job1.id, 'POSTED_BY', company1.id); + await db.createEdge(job1.id, 'POSTED_BY', company1.id); // Company2 -> Job2 (POSTED_BY) - db.createEdge(job2.id, 'POSTED_BY', company2.id); + await db.createEdge(job2.id, 'POSTED_BY', company2.id); // Company3 -> Job3 (POSTED_BY) - inactive - db.createEdge(job3.id, 'POSTED_BY', company3.id); + await db.createEdge(job3.id, 'POSTED_BY', company3.id); // Also create reverse edges for partnership // Company1 -> Company2 (PARTNERS_WITH) // Company2 -> Company1 (PARTNERS_WITH) - db.createEdge(company1.id, 'PARTNERS_WITH', company2.id); - db.createEdge(company2.id, 'PARTNERS_WITH', company1.id); + await db.createEdge(company1.id, 'PARTNERS_WITH', company2.id); + await db.createEdge(company2.id, 'PARTNERS_WITH', company1.id); // Find companies 
connected to other companies via PARTNERS_WITH in both directions - const partners = db.nodes('Company') + const partners = await db.nodes('Company') .connectedTo('Company', 'PARTNERS_WITH', 'both') .exec(); @@ -120,21 +120,21 @@ describe('NodeQuery - Both Direction Support', () => { expect(names).toEqual(['Company 1', 'Company 2']); }); - it('should use DISTINCT to avoid duplicate results with both direction', () => { + it('should use DISTINCT to avoid duplicate results with both direction', async () => { // Create mutual connections - const person1 = db.createNode('Person', { name: 'Person 1' }); - const person2 = db.createNode('Person', { name: 'Person 2' }); - const person3 = db.createNode('Person', { name: 'Person 3' }); + const person1 = await db.createNode('Person', { name: 'Person 1' }); + const person2 = await db.createNode('Person', { name: 'Person 2' }); + const person3 = await db.createNode('Person', { name: 'Person 3' }); // Bidirectional friendship - db.createEdge(person1.id, 'FRIENDS_WITH', person2.id); - db.createEdge(person2.id, 'FRIENDS_WITH', person1.id); + await db.createEdge(person1.id, 'FRIENDS_WITH', person2.id); + await db.createEdge(person2.id, 'FRIENDS_WITH', person1.id); // One-way friendship - db.createEdge(person1.id, 'FRIENDS_WITH', person3.id); + await db.createEdge(person1.id, 'FRIENDS_WITH', person3.id); // Query for people who have FRIENDS_WITH connections (both directions) - const results = db.nodes('Person') + const results = await db.nodes('Person') .connectedTo('Person', 'FRIENDS_WITH', 'both') .exec(); @@ -144,30 +144,30 @@ describe('NodeQuery - Both Direction Support', () => { expect(ids.length).toBe(uniqueIds.length); }); - it('should correctly filter with both direction and additional where clauses', () => { + it('should correctly filter with both direction and additional where clauses', async () => { // Jobs with skills - const job1 = db.createNode('Job', { title: 'Frontend Job', status: 'active' }); - const job2 = 
db.createNode('Job', { title: 'Backend Job', status: 'active' }); - const job3 = db.createNode('Job', { title: 'Fullstack Job', status: 'inactive' }); + const job1 = await db.createNode('Job', { title: 'Frontend Job', status: 'active' }); + const job2 = await db.createNode('Job', { title: 'Backend Job', status: 'active' }); + const job3 = await db.createNode('Job', { title: 'Fullstack Job', status: 'inactive' }); - const skillReact = db.createNode('Skill', { name: 'React' }); - const skillNode = db.createNode('Skill', { name: 'Node.js' }); + const skillReact = await db.createNode('Skill', { name: 'React' }); + const skillNode = await db.createNode('Skill', { name: 'Node.js' }); // Job1 requires React - db.createEdge(job1.id, 'REQUIRES', skillReact.id); + await db.createEdge(job1.id, 'REQUIRES', skillReact.id); // React is required by Job1 (reverse for testing 'in') - db.createEdge(skillReact.id, 'REQUIRED_BY', job1.id); + await db.createEdge(skillReact.id, 'REQUIRED_BY', job1.id); // Job2 requires Node - db.createEdge(job2.id, 'REQUIRES', skillNode.id); - db.createEdge(skillNode.id, 'REQUIRED_BY', job2.id); + await db.createEdge(job2.id, 'REQUIRES', skillNode.id); + await db.createEdge(skillNode.id, 'REQUIRED_BY', job2.id); // Job3 requires both (but inactive) - db.createEdge(job3.id, 'REQUIRES', skillReact.id); - db.createEdge(job3.id, 'REQUIRES', skillNode.id); + await db.createEdge(job3.id, 'REQUIRES', skillReact.id); + await db.createEdge(job3.id, 'REQUIRES', skillNode.id); // Find active jobs that have skill requirements (either direction) - const results = db.nodes('Job') + const results = await db.nodes('Job') .where({ status: 'active' }) .connectedTo('Skill', 'REQUIRES', 'both') .exec(); @@ -178,39 +178,39 @@ describe('NodeQuery - Both Direction Support', () => { }); }); - it('should work with both direction and count()', () => { - const node1 = db.createNode('Node', { id: 1 }); - const node2 = db.createNode('Node', { id: 2 }); - const node3 = 
db.createNode('Node', { id: 3 }); + it('should work with both direction and count()', async () => { + const node1 = await db.createNode('Node', { id: 1 }); + const node2 = await db.createNode('Node', { id: 2 }); + const node3 = await db.createNode('Node', { id: 3 }); - db.createEdge(node1.id, 'LINKS', node2.id); - db.createEdge(node2.id, 'LINKS', node1.id); - db.createEdge(node2.id, 'LINKS', node3.id); + await db.createEdge(node1.id, 'LINKS', node2.id); + await db.createEdge(node2.id, 'LINKS', node1.id); + await db.createEdge(node2.id, 'LINKS', node3.id); - const count = db.nodes('Node') + const count = await db.nodes('Node') .connectedTo('Node', 'LINKS', 'both') .count(); expect(count).toBeGreaterThan(0); }); - it('should work with both direction and exists()', () => { - const node1 = db.createNode('Node', { id: 1 }); - const node2 = db.createNode('Node', { id: 2 }); + it('should work with both direction and exists()', async () => { + const node1 = await db.createNode('Node', { id: 1 }); + const node2 = await db.createNode('Node', { id: 2 }); - db.createEdge(node1.id, 'LINKS', node2.id); + await db.createEdge(node1.id, 'LINKS', node2.id); - const exists = db.nodes('Node') + const exists = await db.nodes('Node') .connectedTo('Node', 'LINKS', 'both') .exists(); expect(exists).toBe(true); }); - it('should handle both direction with no connections', () => { - db.createNode('Isolated', { name: 'Lonely Node' }); + it('should handle both direction with no connections', async () => { + await db.createNode('Isolated', { name: 'Lonely Node' }); - const results = db.nodes('Isolated') + const results = await db.nodes('Isolated') .connectedTo('Isolated', 'NEVER', 'both') .exec(); diff --git a/tests/unit/NodeQuery.test.ts b/tests/unit/NodeQuery.test.ts index 44e10ec..13479fe 100644 --- a/tests/unit/NodeQuery.test.ts +++ b/tests/unit/NodeQuery.test.ts @@ -2,18 +2,18 @@ import { describe, it, expect, beforeEach, afterEach } from '@jest/globals'; import { GraphDatabase } from 
'../../src/core/Database'; import { Node } from '../../src/types'; -describe('NodeQuery', () => { +describe('NodeQuery', async () => { let db: GraphDatabase; - beforeEach(() => { + beforeEach(async () => { db = new GraphDatabase(':memory:'); // Seed test data - const company1 = db.createNode('Company', { name: 'TechCorp', industry: 'SaaS', size: 1000 }); - const company2 = db.createNode('Company', { name: 'DataCo', industry: 'Analytics', size: 500 }); - const company3 = db.createNode('Company', { name: 'CloudBase', industry: 'SaaS', size: 2000 }); + const company1 = await db.createNode('Company', { name: 'TechCorp', industry: 'SaaS', size: 1000 }); + const company2 = await db.createNode('Company', { name: 'DataCo', industry: 'Analytics', size: 500 }); + const company3 = await db.createNode('Company', { name: 'CloudBase', industry: 'SaaS', size: 2000 }); - const job1 = db.createNode('Job', { + const job1 = await db.createNode('Job', { title: 'Senior Engineer', status: 'active', remote: true, @@ -21,7 +21,7 @@ describe('NodeQuery', () => { posted: '2025-01-15' }); - const job2 = db.createNode('Job', { + const job2 = await db.createNode('Job', { title: 'Junior Developer', status: 'closed', remote: false, @@ -29,7 +29,7 @@ describe('NodeQuery', () => { posted: '2025-01-10' }); - const job3 = db.createNode('Job', { + const job3 = await db.createNode('Job', { title: 'Staff Engineer', status: 'active', remote: true, @@ -37,7 +37,7 @@ describe('NodeQuery', () => { posted: '2025-01-20' }); - const job4 = db.createNode('Job', { + const job4 = await db.createNode('Job', { title: 'Lead Developer', status: 'active', remote: false, @@ -45,28 +45,28 @@ describe('NodeQuery', () => { posted: '2025-01-12' }); - const skill1 = db.createNode('Skill', { name: 'TypeScript', level: 'expert' }); - const skill2 = db.createNode('Skill', { name: 'Python', level: 'intermediate' }); - const skill3 = db.createNode('Skill', { name: 'React', level: 'advanced' }); + const skill1 = await 
db.createNode('Skill', { name: 'TypeScript', level: 'expert' }); + const skill2 = await db.createNode('Skill', { name: 'Python', level: 'intermediate' }); + const skill3 = await db.createNode('Skill', { name: 'React', level: 'advanced' }); // Create relationships - db.createEdge(job1.id, 'POSTED_BY', company1.id); - db.createEdge(job2.id, 'POSTED_BY', company2.id); - db.createEdge(job3.id, 'POSTED_BY', company3.id); - db.createEdge(job4.id, 'POSTED_BY', company1.id); - - db.createEdge(job1.id, 'REQUIRES', skill1.id); - db.createEdge(job1.id, 'REQUIRES', skill3.id); - db.createEdge(job3.id, 'REQUIRES', skill1.id); - db.createEdge(job3.id, 'REQUIRES', skill2.id); + await db.createEdge(job1.id, 'POSTED_BY', company1.id); + await db.createEdge(job2.id, 'POSTED_BY', company2.id); + await db.createEdge(job3.id, 'POSTED_BY', company3.id); + await db.createEdge(job4.id, 'POSTED_BY', company1.id); + + await db.createEdge(job1.id, 'REQUIRES', skill1.id); + await db.createEdge(job1.id, 'REQUIRES', skill3.id); + await db.createEdge(job3.id, 'REQUIRES', skill1.id); + await db.createEdge(job3.id, 'REQUIRES', skill2.id); }); - afterEach(() => { - db.close(); + afterEach(async () => { + await db.close(); }); - describe('Fluent API method chaining', () => { - it('should return query instance for chaining where()', () => { + describe('Fluent API method chaining', async () => { + it('should return query instance for chaining where()', async () => { const query = db.nodes('Job') .where({ status: 'active' }) .where({ remote: true }); @@ -75,7 +75,7 @@ describe('NodeQuery', () => { expect(typeof query.exec).toBe('function'); }); - it('should return query instance for chaining connectedTo()', () => { + it('should return query instance for chaining connectedTo()', async () => { const query = db.nodes('Job') .connectedTo('Company', 'POSTED_BY') .connectedTo('Skill', 'REQUIRES'); @@ -84,28 +84,28 @@ describe('NodeQuery', () => { expect(typeof query.exec).toBe('function'); }); - it('should 
return query instance for chaining orderBy()', () => { + it('should return query instance for chaining orderBy()', async () => { const query = db.nodes('Job').orderBy('salary', 'desc'); expect(query).toBeDefined(); expect(typeof query.exec).toBe('function'); }); - it('should return query instance for chaining limit()', () => { + it('should return query instance for chaining limit()', async () => { const query = db.nodes('Job').limit(10); expect(query).toBeDefined(); expect(typeof query.exec).toBe('function'); }); - it('should return query instance for chaining offset()', () => { + it('should return query instance for chaining offset()', async () => { const query = db.nodes('Job').offset(5); expect(query).toBeDefined(); expect(typeof query.exec).toBe('function'); }); - it('should allow complex method chaining', () => { + it('should allow complex method chaining', async () => { const query = db.nodes('Job') .where({ status: 'active' }) .connectedTo('Company', 'POSTED_BY') @@ -114,14 +114,14 @@ describe('NodeQuery', () => { .offset(0); expect(query).toBeDefined(); - const results = query.exec(); + const results = await query.exec(); expect(Array.isArray(results)).toBe(true); }); }); - describe('where() filtering', () => { - it('should filter nodes by single property', () => { - const results = db.nodes('Job') + describe('where() filtering', async () => { + it('should filter nodes by single property', async () => { + const results = await db.nodes('Job') .where({ status: 'active' }) .exec(); @@ -131,9 +131,9 @@ describe('NodeQuery', () => { }); }); - it('should filter nodes by multiple properties (AND logic)', () => { + it('should filter nodes by multiple properties (AND logic)', async () => { // Boolean values need special handling in SQLite - use filter() for complex types - const results = db.nodes('Job') + const results = await db.nodes('Job') .where({ status: 'active' }) .filter(node => node.properties.remote === true) .exec(); @@ -145,8 +145,8 @@ 
describe('NodeQuery', () => { }); }); - it('should support chained where() calls (AND logic)', () => { - const results = db.nodes('Job') + it('should support chained where() calls (AND logic)', async () => { + const results = await db.nodes('Job') .where({ status: 'active' }) .where({ salary: 150000 }) .exec(); @@ -158,8 +158,8 @@ describe('NodeQuery', () => { }); }); - it('should filter by numeric properties', () => { - const results = db.nodes('Job') + it('should filter by numeric properties', async () => { + const results = await db.nodes('Job') .where({ salary: 150000 }) .exec(); @@ -167,30 +167,30 @@ describe('NodeQuery', () => { expect(results[0].properties.title).toBe('Senior Engineer'); }); - it('should filter by boolean properties using filter()', () => { + it('should filter by boolean properties using filter()', async () => { // Boolean filtering requires filter() since SQLite stores as integers - const results = db.nodes('Job') + const results = await db.nodes('Job') .filter(node => node.properties.remote === false) .exec(); expect(results).toHaveLength(2); }); - it('should return empty array when no matches', () => { - const results = db.nodes('Job') + it('should return empty array when no matches', async () => { + const results = await db.nodes('Job') .where({ status: 'nonexistent' }) .exec(); expect(results).toHaveLength(0); }); - it('should handle nested property filtering', () => { - db.createNode('Job', { + it('should handle nested property filtering', async () => { + await db.createNode('Job', { title: 'Complex Job', details: { location: 'NYC', team: 'Engineering' } }); - const results = db.nodes('Job') + const results = await db.nodes('Job') .where({ title: 'Complex Job' }) .exec(); @@ -199,9 +199,9 @@ describe('NodeQuery', () => { }); }); - describe('filter() custom predicate', () => { - it('should filter with custom JavaScript predicate', () => { - const results = db.nodes('Job') + describe('filter() custom predicate', async () => { + 
it('should filter with custom JavaScript predicate', async () => { + const results = await db.nodes('Job') .filter(node => node.properties.salary >= 180000) .exec(); @@ -211,8 +211,8 @@ describe('NodeQuery', () => { }); }); - it('should combine where() and filter()', () => { - const results = db.nodes('Job') + it('should combine where() and filter()', async () => { + const results = await db.nodes('Job') .where({ status: 'active' }) .filter(node => node.properties.salary >= 180000) .exec(); @@ -224,8 +224,8 @@ describe('NodeQuery', () => { }); }); - it('should support multiple filter() calls (AND logic)', () => { - const results = db.nodes('Job') + it('should support multiple filter() calls (AND logic)', async () => { + const results = await db.nodes('Job') .filter(node => node.properties.status === 'active') .filter(node => node.properties.remote === true) .filter(node => node.properties.salary >= 150000) @@ -234,8 +234,8 @@ describe('NodeQuery', () => { expect(results).toHaveLength(2); }); - it('should handle complex predicates', () => { - const results = db.nodes('Job') + it('should handle complex predicates', async () => { + const results = await db.nodes('Job') .filter(node => { const salary = node.properties.salary; const title = node.properties.title.toLowerCase(); @@ -251,17 +251,17 @@ describe('NodeQuery', () => { }); }); - describe('connectedTo() relationship queries', () => { - it('should find nodes connected outward', () => { - const results = db.nodes('Job') + describe('connectedTo() relationship queries', async () => { + it('should find nodes connected outward', async () => { + const results = await db.nodes('Job') .connectedTo('Company', 'POSTED_BY', 'out') .exec(); expect(results).toHaveLength(4); }); - it('should find nodes connected inward', () => { - const results = db.nodes('Company') + it('should find nodes connected inward', async () => { + const results = await db.nodes('Company') .connectedTo('Job', 'POSTED_BY', 'in') .exec(); @@ -269,16 
+269,16 @@ describe('NodeQuery', () => { expect(results.length).toBeGreaterThanOrEqual(2); }); - it('should default to outward direction', () => { - const results = db.nodes('Job') + it('should default to outward direction', async () => { + const results = await db.nodes('Job') .connectedTo('Company', 'POSTED_BY') .exec(); expect(results).toHaveLength(4); }); - it('should combine connectedTo() with where()', () => { - const results = db.nodes('Job') + it('should combine connectedTo() with where()', async () => { + const results = await db.nodes('Job') .where({ status: 'active' }) .connectedTo('Company', 'POSTED_BY') .exec(); @@ -286,8 +286,8 @@ describe('NodeQuery', () => { expect(results).toHaveLength(3); }); - it('should support multiple connectedTo() calls', () => { - const results = db.nodes('Job') + it('should support multiple connectedTo() calls', async () => { + const results = await db.nodes('Job') .connectedTo('Company', 'POSTED_BY') .connectedTo('Skill', 'REQUIRES') .exec(); @@ -303,10 +303,10 @@ describe('NodeQuery', () => { expect(jobsWithSkills.length).toBeGreaterThan(0); }); - it('should handle jobs with no connections', () => { - db.createNode('Job', { title: 'Orphan Job', status: 'active' }); + it('should handle jobs with no connections', async () => { + await db.createNode('Job', { title: 'Orphan Job', status: 'active' }); - const results = db.nodes('Job') + const results = await db.nodes('Job') .connectedTo('Company', 'POSTED_BY') .exec(); @@ -314,9 +314,9 @@ describe('NodeQuery', () => { }); }); - describe('notConnectedTo() negative relationship queries', () => { - it('should find nodes NOT connected to specific type', () => { - const orphan = db.createNode('Job', { title: 'Orphan Job', status: 'active' }); + describe('notConnectedTo() negative relationship queries', async () => { + it('should find nodes NOT connected to specific type', async () => { + const orphan = await db.createNode('Job', { title: 'Orphan Job', status: 'active' }); // Note: 
notConnectedTo() is defined but may not be fully implemented with NOT EXISTS // Test that method exists and returns results (actual behavior may vary) @@ -325,14 +325,14 @@ describe('NodeQuery', () => { expect(typeof query.exec).toBe('function'); // If implementation is complete, orphan should be the only result - const results = query.exec(); + const results = await query.exec(); const hasOrphan = results.some(r => r.id === orphan.id); expect(results.length).toBeGreaterThan(0); }); - it('should combine notConnectedTo() with where()', () => { - db.createNode('Job', { title: 'Orphan Active', status: 'active' }); - db.createNode('Job', { title: 'Orphan Closed', status: 'closed' }); + it('should combine notConnectedTo() with where()', async () => { + await db.createNode('Job', { title: 'Orphan Active', status: 'active' }); + await db.createNode('Job', { title: 'Orphan Closed', status: 'closed' }); // Test method chaining works const query = db.nodes('Job') @@ -340,14 +340,14 @@ describe('NodeQuery', () => { .notConnectedTo('Company', 'POSTED_BY'); expect(query).toBeDefined(); - const results = query.exec(); + const results = await query.exec(); expect(Array.isArray(results)).toBe(true); }); }); - describe('orderBy() sorting', () => { - it('should sort by string property ascending', () => { - const results = db.nodes('Job') + describe('orderBy() sorting', async () => { + it('should sort by string property ascending', async () => { + const results = await db.nodes('Job') .orderBy('title', 'asc') .exec(); @@ -357,8 +357,8 @@ describe('NodeQuery', () => { } }); - it('should sort by string property descending', () => { - const results = db.nodes('Job') + it('should sort by string property descending', async () => { + const results = await db.nodes('Job') .orderBy('title', 'desc') .exec(); @@ -368,8 +368,8 @@ describe('NodeQuery', () => { } }); - it('should sort by numeric property ascending', () => { - const results = db.nodes('Job') + it('should sort by numeric property 
ascending', async () => { + const results = await db.nodes('Job') .orderBy('salary', 'asc') .exec(); @@ -379,8 +379,8 @@ describe('NodeQuery', () => { } }); - it('should sort by numeric property descending', () => { - const results = db.nodes('Job') + it('should sort by numeric property descending', async () => { + const results = await db.nodes('Job') .orderBy('salary', 'desc') .exec(); @@ -388,8 +388,8 @@ describe('NodeQuery', () => { expect(salaries).toEqual([200000, 180000, 150000, 80000]); }); - it('should default to ascending order', () => { - const results = db.nodes('Job') + it('should default to ascending order', async () => { + const results = await db.nodes('Job') .orderBy('salary') .exec(); @@ -397,8 +397,8 @@ describe('NodeQuery', () => { expect(salaries).toEqual([80000, 150000, 180000, 200000]); }); - it('should combine orderBy() with where()', () => { - const results = db.nodes('Job') + it('should combine orderBy() with where()', async () => { + const results = await db.nodes('Job') .where({ status: 'active' }) .orderBy('salary', 'desc') .exec(); @@ -409,29 +409,29 @@ describe('NodeQuery', () => { }); }); - describe('limit() pagination', () => { - it('should limit number of results', () => { - const results = db.nodes('Job') + describe('limit() pagination', async () => { + it('should limit number of results', async () => { + const results = await db.nodes('Job') .limit(2) .exec(); expect(results).toHaveLength(2); }); - it('should throw error on zero limit', () => { + it('should throw error on zero limit', async () => { expect(() => { db.nodes('Job').limit(0); }).toThrow('Limit must be a positive integer'); }); - it('should throw error on negative limit', () => { + it('should throw error on negative limit', async () => { expect(() => { db.nodes('Job').limit(-5); }).toThrow('Limit must be a positive integer'); }); - it('should combine limit() with orderBy()', () => { - const results = db.nodes('Job') + it('should combine limit() with orderBy()', async 
() => { + const results = await db.nodes('Job') .orderBy('salary', 'desc') .limit(2) .exec(); @@ -441,8 +441,8 @@ describe('NodeQuery', () => { expect(results[1].properties.salary).toBe(180000); }); - it('should handle limit larger than result set', () => { - const results = db.nodes('Job') + it('should handle limit larger than result set', async () => { + const results = await db.nodes('Job') .limit(100) .exec(); @@ -450,14 +450,14 @@ describe('NodeQuery', () => { }); }); - describe('offset() pagination', () => { - it('should skip results with offset (requires limit)', () => { - const allResults = db.nodes('Job') + describe('offset() pagination', async () => { + it('should skip results with offset (requires limit)', async () => { + const allResults = await db.nodes('Job') .orderBy('salary', 'desc') .exec(); // SQLite requires LIMIT when using OFFSET - const offsetResults = db.nodes('Job') + const offsetResults = await db.nodes('Job') .orderBy('salary', 'desc') .limit(10) .offset(2) @@ -467,8 +467,8 @@ describe('NodeQuery', () => { expect(offsetResults[0].id).toBe(allResults[2].id); }); - it('should accept zero offset', () => { - const results = db.nodes('Job') + it('should accept zero offset', async () => { + const results = await db.nodes('Job') .limit(10) .offset(0) .exec(); @@ -476,20 +476,20 @@ describe('NodeQuery', () => { expect(results).toHaveLength(4); }); - it('should throw error on negative offset', () => { + it('should throw error on negative offset', async () => { expect(() => { db.nodes('Job').offset(-1); }).toThrow('Offset must be a non-negative integer'); }); - it('should combine offset() and limit() for pagination', () => { - const page1 = db.nodes('Job') + it('should combine offset() and limit() for pagination', async () => { + const page1 = await db.nodes('Job') .orderBy('salary', 'desc') .limit(2) .offset(0) .exec(); - const page2 = db.nodes('Job') + const page2 = await db.nodes('Job') .orderBy('salary', 'desc') .limit(2) .offset(2) @@ -505,8 
+505,8 @@ describe('NodeQuery', () => { expect(salariesPage2).toEqual([150000, 80000]); }); - it('should handle offset beyond result set', () => { - const results = db.nodes('Job') + it('should handle offset beyond result set', async () => { + const results = await db.nodes('Job') .limit(10) .offset(100) .exec(); @@ -515,9 +515,9 @@ describe('NodeQuery', () => { }); }); - describe('exec() query execution', () => { - it('should execute query and return nodes', () => { - const results = db.nodes('Job').exec(); + describe('exec() query execution', async () => { + it('should execute query and return nodes', async () => { + const results = await db.nodes('Job').exec(); expect(Array.isArray(results)).toBe(true); expect(results.length).toBeGreaterThan(0); @@ -531,8 +531,8 @@ describe('NodeQuery', () => { }); }); - it('should return nodes with proper types', () => { - const results = db.nodes('Job').exec(); + it('should return nodes with proper types', async () => { + const results = await db.nodes('Job').exec(); results.forEach(node => { expect(typeof node.id).toBe('number'); @@ -543,14 +543,14 @@ describe('NodeQuery', () => { }); }); - it('should handle queries with no results', () => { - const results = db.nodes('NonExistentType').exec(); + it('should handle queries with no results', async () => { + const results = await db.nodes('NonExistentType').exec(); expect(results).toEqual([]); }); - it('should execute complex queries correctly', () => { - const results = db.nodes('Job') + it('should execute complex queries correctly', async () => { + const results = await db.nodes('Job') .where({ status: 'active' }) .connectedTo('Company', 'POSTED_BY') .orderBy('salary', 'desc') @@ -563,9 +563,9 @@ describe('NodeQuery', () => { }); }); - describe('first() single result', () => { - it('should return first matching node', () => { - const result = db.nodes('Job') + describe('first() single result', async () => { + it('should return first matching node', async () => { + const result 
= await db.nodes('Job') .orderBy('salary', 'desc') .first(); @@ -573,16 +573,16 @@ describe('NodeQuery', () => { expect(result?.properties.salary).toBe(200000); }); - it('should return null when no matches', () => { - const result = db.nodes('Job') + it('should return null when no matches', async () => { + const result = await db.nodes('Job') .where({ status: 'nonexistent' }) .first(); expect(result).toBeNull(); }); - it('should work with where() filtering', () => { - const result = db.nodes('Job') + it('should work with where() filtering', async () => { + const result = await db.nodes('Job') .where({ status: 'closed' }) .first(); @@ -590,42 +590,42 @@ describe('NodeQuery', () => { expect(result?.properties.status).toBe('closed'); }); - it('should not affect subsequent queries', () => { + it('should not affect subsequent queries', async () => { const query = db.nodes('Job').orderBy('salary', 'desc'); - const first = query.first(); - const all = query.exec(); + const first = await query.first(); + const all = await query.exec(); expect(first).not.toBeNull(); expect(all).toHaveLength(4); }); }); - describe('count() aggregation', () => { - it('should count all nodes of type', () => { - const count = db.nodes('Job').count(); + describe('count() aggregation', async () => { + it('should count all nodes of type', async () => { + const count = await db.nodes('Job').count(); expect(count).toBe(4); }); - it('should count filtered results', () => { - const count = db.nodes('Job') + it('should count filtered results', async () => { + const count = await db.nodes('Job') .where({ status: 'active' }) .count(); expect(count).toBe(3); }); - it('should return 0 for no matches', () => { - const count = db.nodes('Job') + it('should return 0 for no matches', async () => { + const count = await db.nodes('Job') .where({ status: 'nonexistent' }) .count(); expect(count).toBe(0); }); - it('should count with connectedTo() filtering', () => { - const count = db.nodes('Job') + it('should count 
with connectedTo() filtering', async () => { + const count = await db.nodes('Job') .connectedTo('Skill', 'REQUIRES') .count(); @@ -634,24 +634,24 @@ describe('NodeQuery', () => { expect(count).toBeLessThanOrEqual(4); }); - it('should not be affected by limit/offset', () => { - const countAll = db.nodes('Job').count(); - const countLimited = db.nodes('Job').limit(2).count(); + it('should not be affected by limit/offset', async () => { + const countAll = await db.nodes('Job').count(); + const countLimited = await db.nodes('Job').limit(2).count(); expect(countAll).toBe(countLimited); }); - it('should use COUNT(DISTINCT) for both direction to avoid duplicates', () => { + it('should use COUNT(DISTINCT) for both direction to avoid duplicates', async () => { // Create bidirectional relationship - const person1 = db.createNode('Person', { name: 'Alice' }); - const person2 = db.createNode('Person', { name: 'Bob' }); + const person1 = await db.createNode('Person', { name: 'Alice' }); + const person2 = await db.createNode('Person', { name: 'Bob' }); // Create edges in both directions (simulating bidirectional KNOWS relationship) - db.createEdge(person1.id, 'KNOWS', person2.id); - db.createEdge(person2.id, 'KNOWS', person1.id); + await db.createEdge(person1.id, 'KNOWS', person2.id); + await db.createEdge(person2.id, 'KNOWS', person1.id); // Count with 'both' direction should use DISTINCT to avoid counting duplicates - const count = db.nodes('Person') + const count = await db.nodes('Person') .connectedTo('Person', 'KNOWS', 'both') .count(); @@ -660,35 +660,35 @@ describe('NodeQuery', () => { }); }); - describe('exists() predicate', () => { - it('should return true when nodes exist', () => { - const exists = db.nodes('Job') + describe('exists() predicate', async () => { + it('should return true when nodes exist', async () => { + const exists = await db.nodes('Job') .where({ status: 'active' }) .exists(); expect(exists).toBe(true); }); - it('should return false when no nodes 
exist', () => { - const exists = db.nodes('Job') + it('should return false when no nodes exist', async () => { + const exists = await db.nodes('Job') .where({ status: 'nonexistent' }) .exists(); expect(exists).toBe(false); }); - it('should work with empty database', () => { + it('should work with empty database', async () => { const emptyDb = new GraphDatabase(':memory:'); - const exists = emptyDb.nodes('Job').exists(); + const exists = await emptyDb.nodes('Job').exists(); expect(exists).toBe(false); - emptyDb.close(); + await emptyDb.close(); }); }); - describe('both() bidirectional relationships', () => { - it('should find nodes connected in either direction', () => { - const results = db.nodes('Job') + describe('both() bidirectional relationships', async () => { + it('should find nodes connected in either direction', async () => { + const results = await db.nodes('Job') .connectedTo('Company', 'POSTED_BY', 'both') .exec(); @@ -696,14 +696,14 @@ describe('NodeQuery', () => { expect(results.length).toBeGreaterThan(0); }); - it('should handle truly bidirectional edges', () => { + it('should handle truly bidirectional edges', async () => { // Create bidirectional relationship - const person1 = db.createNode('Person', { name: 'Alice' }); - const person2 = db.createNode('Person', { name: 'Bob' }); - db.createEdge(person1.id, 'KNOWS', person2.id); - db.createEdge(person2.id, 'KNOWS', person1.id); + const person1 = await db.createNode('Person', { name: 'Alice' }); + const person2 = await db.createNode('Person', { name: 'Bob' }); + await db.createEdge(person1.id, 'KNOWS', person2.id); + await db.createEdge(person2.id, 'KNOWS', person1.id); - const results = db.nodes('Person') + const results = await db.nodes('Person') .connectedTo('Person', 'KNOWS', 'both') .exec(); @@ -711,29 +711,29 @@ describe('NodeQuery', () => { }); }); - describe('Edge cases and error conditions', () => { - it('should handle empty property object in where()', () => { - const results = 
db.nodes('Job') + describe('Edge cases and error conditions', async () => { + it('should handle empty property object in where()', async () => { + const results = await db.nodes('Job') .where({}) .exec(); expect(results).toHaveLength(4); }); - it('should handle undefined properties gracefully', () => { - const results = db.nodes('Job') + it('should handle undefined properties gracefully', async () => { + const results = await db.nodes('Job') .where({ nonExistentField: 'value' }) .exec(); expect(results).toHaveLength(0); }); - it('should handle null property values', () => { - const job = db.createNode('Job', { title: 'Test', description: null }); + it('should handle null property values', async () => { + const job = await db.createNode('Job', { title: 'Test', description: null }); // SQLite/JSON handling of null in where() may differ from expectations // Test that we can query by title and get the job with null description - const results = db.nodes('Job') + const results = await db.nodes('Job') .where({ title: 'Test' }) .exec(); @@ -741,21 +741,21 @@ describe('NodeQuery', () => { expect(results[0].properties.description).toBeNull(); }); - it('should handle special characters in string properties', () => { - db.createNode('Job', { title: "Engineer's Job", company: 'O"Brien & Co' }); + it('should handle special characters in string properties', async () => { + await db.createNode('Job', { title: "Engineer's Job", company: 'O"Brien & Co' }); - const results = db.nodes('Job') + const results = await db.nodes('Job') .where({ title: "Engineer's Job" }) .exec(); expect(results).toHaveLength(1); }); - it('should handle very long property values', () => { + it('should handle very long property values', async () => { const longString = 'x'.repeat(10000); - db.createNode('Job', { title: 'Long Job', description: longString }); + await db.createNode('Job', { title: 'Long Job', description: longString }); - const results = db.nodes('Job') + const results = await db.nodes('Job') 
.where({ title: 'Long Job' }) .exec(); @@ -763,8 +763,8 @@ describe('NodeQuery', () => { expect(results[0].properties.description).toBe(longString); }); - it('should handle chaining same method multiple times', () => { - const results = db.nodes('Job') + it('should handle chaining same method multiple times', async () => { + const results = await db.nodes('Job') .where({ status: 'active' }) .where({ salary: 150000 }) .where({ posted: '2025-01-15' }) @@ -774,26 +774,26 @@ describe('NodeQuery', () => { expect(results[0].properties.title).toBe('Senior Engineer'); }); - it('should handle query reuse', () => { + it('should handle query reuse', async () => { const query = db.nodes('Job').where({ status: 'active' }); - const results1 = query.exec(); - const results2 = query.exec(); + const results1 = await query.exec(); + const results2 = await query.exec(); expect(results1).toHaveLength(results2.length); expect(results1[0].id).toBe(results2[0].id); }); }); - describe('Performance and SQL generation', () => { - it('should handle large result sets efficiently', () => { + describe('Performance and SQL generation', async () => { + it('should handle large result sets efficiently', async () => { // Create 1000 nodes for (let i = 0; i < 1000; i++) { - db.createNode('TestNode', { index: i, category: i % 10 }); + await db.createNode('TestNode', { index: i, category: i % 10 }); } const start = Date.now(); - const results = db.nodes('TestNode') + const results = await db.nodes('TestNode') .where({ category: 5 }) .orderBy('index', 'asc') .limit(10) @@ -804,14 +804,14 @@ describe('NodeQuery', () => { expect(duration).toBeLessThan(1000); // Should complete in under 1 second }); - it('should execute distinct queries for both direction', () => { + it('should execute distinct queries for both direction', async () => { // This tests the DISTINCT SQL generation for 'both' direction - const person1 = db.createNode('Person', { name: 'Alice' }); - const person2 = db.createNode('Person', { 
name: 'Bob' }); - db.createEdge(person1.id, 'KNOWS', person2.id); - db.createEdge(person2.id, 'KNOWS', person1.id); + const person1 = await db.createNode('Person', { name: 'Alice' }); + const person2 = await db.createNode('Person', { name: 'Bob' }); + await db.createEdge(person1.id, 'KNOWS', person2.id); + await db.createEdge(person2.id, 'KNOWS', person1.id); - const results = db.nodes('Person') + const results = await db.nodes('Person') .connectedTo('Person', 'KNOWS', 'both') .exec(); @@ -821,8 +821,8 @@ describe('NodeQuery', () => { expect(ids.length).toBe(uniqueIds.length); }); - it('should handle multiple joins efficiently', () => { - const results = db.nodes('Job') + it('should handle multiple joins efficiently', async () => { + const results = await db.nodes('Job') .connectedTo('Company', 'POSTED_BY') .connectedTo('Skill', 'REQUIRES') .where({ status: 'active' }) diff --git a/tests/unit/PatternQuery.test.ts b/tests/unit/PatternQuery.test.ts index 0d0c86b..7266399 100644 --- a/tests/unit/PatternQuery.test.ts +++ b/tests/unit/PatternQuery.test.ts @@ -8,15 +8,15 @@ import { GraphDatabase } from '../../src/core/Database'; import { PatternQuery } from '../../src/query/PatternQuery'; import { PatternError } from '../../src/types/pattern'; -describe('PatternQuery', () => { +describe('PatternQuery', async () => { let db: GraphDatabase; beforeEach(() => { db = new GraphDatabase(':memory:'); }); - afterEach(() => { - db.close(); + afterEach(async () => { + await db.close(); }); describe('Pattern Builder - Basic Structure', () => { @@ -65,12 +65,12 @@ describe('PatternQuery', () => { }); }); - describe('Simple 2-Hop Pattern', () => { - beforeEach(() => { + describe('Simple 2-Hop Pattern', async () => { + beforeEach(async () => { // Create test data: Person -> WORKS_AT -> Company - const person = db.createNode('Person', { name: 'Alice', age: 30 }); - const company = db.createNode('Company', { name: 'TechCorp' }); - db.createEdge(person.id, 'WORKS_AT', company.id); + 
const person = await db.createNode('Person', { name: 'Alice', age: 30 }); + const company = await db.createNode('Company', { name: 'TechCorp' }); + await db.createEdge(person.id, 'WORKS_AT', company.id); }); it('should find person connected to company via WORKS_AT', () => { @@ -110,11 +110,11 @@ describe('PatternQuery', () => { }); }); - describe('Direction Handling', () => { - beforeEach(() => { - const job = db.createNode('Job', { title: 'Engineer' }); - const company = db.createNode('Company', { name: 'TechCorp' }); - db.createEdge(job.id, 'POSTED_BY', company.id); + describe('Direction Handling', async () => { + beforeEach(async () => { + const job = await db.createNode('Job', { title: 'Engineer' }); + const company = await db.createNode('Company', { name: 'TechCorp' }); + await db.createEdge(job.id, 'POSTED_BY', company.id); }); it('should traverse edges in "out" direction', () => { @@ -139,10 +139,10 @@ describe('PatternQuery', () => { expect(results[0].job.properties.title).toBe('Engineer'); }); - it('should traverse edges in "both" directions', () => { - const person1 = db.createNode('Person', { name: 'Alice' }); - const person2 = db.createNode('Person', { name: 'Bob' }); - db.createEdge(person1.id, 'KNOWS', person2.id); + it('should traverse edges in "both" directions', async () => { + const person1 = await db.createNode('Person', { name: 'Alice' }); + const person2 = await db.createNode('Person', { name: 'Bob' }); + await db.createEdge(person1.id, 'KNOWS', person2.id); const results = db.pattern() .start('person', 'Person') @@ -156,11 +156,11 @@ describe('PatternQuery', () => { }); }); - describe('Filtering with where()', () => { - beforeEach(() => { - db.createNode('Person', { name: 'Alice', age: 25 }); - db.createNode('Person', { name: 'Bob', age: 35 }); - db.createNode('Person', { name: 'Charlie', age: 45 }); + describe('Filtering with where()', async () => { + beforeEach(async () => { + await db.createNode('Person', { name: 'Alice', age: 25 }); + 
await db.createNode('Person', { name: 'Bob', age: 35 }); + await db.createNode('Person', { name: 'Charlie', age: 45 }); }); it('should filter with exact match', () => { @@ -213,11 +213,11 @@ describe('PatternQuery', () => { }); }); - describe('Variable Selection with select()', () => { - beforeEach(() => { - const person = db.createNode('Person', { name: 'Alice' }); - const company = db.createNode('Company', { name: 'TechCorp' }); - db.createEdge(person.id, 'WORKS_AT', company.id); + describe('Variable Selection with select()', async () => { + beforeEach(async () => { + const person = await db.createNode('Person', { name: 'Alice' }); + const company = await db.createNode('Company', { name: 'TechCorp' }); + await db.createEdge(person.id, 'WORKS_AT', company.id); }); it('should return all variables when select() not called', () => { @@ -246,10 +246,10 @@ describe('PatternQuery', () => { }); }); - describe('Pagination and Ordering', () => { - beforeEach(() => { + describe('Pagination and Ordering', async () => { + beforeEach(async () => { for (let i = 1; i <= 10; i++) { - db.createNode('Person', { name: `Person${i}`, age: 20 + i }); + await db.createNode('Person', { name: `Person${i}`, age: 20 + i }); } }); @@ -291,10 +291,10 @@ describe('PatternQuery', () => { }); }); - describe('Helper Methods', () => { - beforeEach(() => { - db.createNode('Person', { name: 'Alice' }); - db.createNode('Person', { name: 'Bob' }); + describe('Helper Methods', async () => { + beforeEach(async () => { + await db.createNode('Person', { name: 'Alice' }); + await db.createNode('Person', { name: 'Bob' }); }); it('should support first() to return single result', () => { @@ -333,15 +333,15 @@ describe('PatternQuery', () => { }); }); - describe('Multi-Hop Patterns', () => { - beforeEach(() => { + describe('Multi-Hop Patterns', async () => { + beforeEach(async () => { // Create: Person -> KNOWS -> Friend -> WORKS_AT -> Company - const person = db.createNode('Person', { name: 'Alice' }); - const 
friend = db.createNode('Person', { name: 'Bob' }); - const company = db.createNode('Company', { name: 'TechCorp' }); + const person = await db.createNode('Person', { name: 'Alice' }); + const friend = await db.createNode('Person', { name: 'Bob' }); + const company = await db.createNode('Company', { name: 'TechCorp' }); - db.createEdge(person.id, 'KNOWS', friend.id); - db.createEdge(friend.id, 'WORKS_AT', company.id); + await db.createEdge(person.id, 'KNOWS', friend.id); + await db.createEdge(friend.id, 'WORKS_AT', company.id); }); it('should handle 3-hop pattern', () => { @@ -362,14 +362,14 @@ describe('PatternQuery', () => { }); }); - describe('Cyclic Patterns', () => { - it('should detect mutual relationships', () => { - const alice = db.createNode('Person', { name: 'Alice' }); - const bob = db.createNode('Person', { name: 'Bob' }); + describe('Cyclic Patterns', async () => { + it('should detect mutual relationships', async () => { + const alice = await db.createNode('Person', { name: 'Alice' }); + const bob = await db.createNode('Person', { name: 'Bob' }); // Mutual recommendations - db.createEdge(alice.id, 'RECOMMENDS', bob.id); - db.createEdge(bob.id, 'RECOMMENDS', alice.id); + await db.createEdge(alice.id, 'RECOMMENDS', bob.id); + await db.createEdge(bob.id, 'RECOMMENDS', alice.id); const results = db.pattern() .start('personA', 'Person') diff --git a/tests/unit/Transaction.test.ts b/tests/unit/Transaction.test.ts index 8b0b696..41bf9f0 100644 --- a/tests/unit/Transaction.test.ts +++ b/tests/unit/Transaction.test.ts @@ -3,7 +3,7 @@ import { GraphDatabase } from '../../src/core/Database'; import { TransactionContext } from '../../src/core/Transaction'; import * as fs from 'fs'; -describe('TransactionContext', () => { +describe('TransactionContext', async () => { let db: GraphDatabase; const testDbPath = ':memory:'; @@ -11,140 +11,130 @@ describe('TransactionContext', () => { db = new GraphDatabase(testDbPath); }); - afterEach(() => { - db.close(); + 
afterEach(async () => { + await db.close(); }); - describe('Automatic commit/rollback', () => { - it('should automatically commit successful transactions', () => { + describe('Automatic commit/rollback', async () => { + it('should automatically commit successful transactions', async () => { let contextReceived: TransactionContext | null = null; - db.transaction((ctx) => { + await db.transaction(async (ctx) => { contextReceived = ctx; - db.createNode('Job', { title: 'Test Job' }); + await db.createNode('Job', { title: 'Test Job' }); }); expect(contextReceived).toBeInstanceOf(TransactionContext); // Verify node was committed - const jobs = db.nodes('Job').exec(); + const jobs = await db.nodes('Job').exec(); expect(jobs).toHaveLength(1); expect(jobs[0].properties.title).toBe('Test Job'); }); - it('should automatically rollback on error', () => { - expect(() => { - db.transaction((ctx) => { - db.createNode('Job', { title: 'Test Job' }); + it('should automatically rollback on error', async () => { + await expect(db.transaction(async (ctx) => { + await db.createNode('Job', { title: 'Test Job' }); throw new Error('Intentional error'); - }); - }).toThrow('Intentional error'); + })).rejects.toThrow('Intentional error'); // Verify node was NOT committed - const jobs = db.nodes('Job').exec(); + const jobs = await db.nodes('Job').exec(); expect(jobs).toHaveLength(0); }); }); - describe('Manual commit', () => { - it('should allow manual commit with ctx.commit()', () => { - db.transaction((ctx) => { - db.createNode('Job', { title: 'Job 1' }); + describe('Manual commit', async () => { + it('should allow manual commit with ctx.commit()', async () => { + await db.transaction(async (ctx) => { + await db.createNode('Job', { title: 'Job 1' }); ctx.commit(); // Transaction is now finalized }); - const jobs = db.nodes('Job').exec(); + const jobs = await db.nodes('Job').exec(); expect(jobs).toHaveLength(1); }); - it('should throw error if commit() called twice', () => { - expect(() => { 
- db.transaction((ctx) => { - db.createNode('Job', { title: 'Job 1' }); + it('should throw error if commit() called twice', async () => { + await expect(db.transaction(async (ctx) => { + await db.createNode('Job', { title: 'Job 1' }); ctx.commit(); ctx.commit(); // Second commit should fail - }); - }).toThrow('Transaction already finalized'); + })).rejects.toThrow('Transaction already finalized'); }); - it('should throw error if commit() called after rollback()', () => { - expect(() => { - db.transaction((ctx) => { - db.createNode('Job', { title: 'Job 1' }); + it('should throw error if commit() called after rollback()', async () => { + await expect(db.transaction(async (ctx) => { + await db.createNode('Job', { title: 'Job 1' }); ctx.rollback(); ctx.commit(); // Should fail - }); - }).toThrow('Transaction already finalized'); + })).rejects.toThrow('Transaction already finalized'); }); }); - describe('Manual rollback', () => { - it('should allow manual rollback with ctx.rollback()', () => { - db.transaction((ctx) => { - db.createNode('Job', { title: 'Job 1' }); + describe('Manual rollback', async () => { + it('should allow manual rollback with ctx.rollback()', async () => { + await db.transaction(async (ctx) => { + await db.createNode('Job', { title: 'Job 1' }); ctx.rollback(); // Transaction rolled back, no automatic commit }); - const jobs = db.nodes('Job').exec(); + const jobs = await db.nodes('Job').exec(); expect(jobs).toHaveLength(0); }); - it('should throw error if rollback() called twice', () => { - expect(() => { - db.transaction((ctx) => { - db.createNode('Job', { title: 'Job 1' }); + it('should throw error if rollback() called twice', async () => { + await expect(db.transaction(async (ctx) => { + await db.createNode('Job', { title: 'Job 1' }); ctx.rollback(); ctx.rollback(); // Second rollback should fail - }); - }).toThrow('Transaction already finalized'); + })).rejects.toThrow('Transaction already finalized'); }); - it('should throw error if rollback() 
called after commit()', () => { - expect(() => { - db.transaction((ctx) => { - db.createNode('Job', { title: 'Job 1' }); + it('should throw error if rollback() called after commit()', async () => { + await expect(db.transaction(async (ctx) => { + await db.createNode('Job', { title: 'Job 1' }); ctx.commit(); ctx.rollback(); // Should fail - }); - }).toThrow('Transaction already finalized'); + })).rejects.toThrow('Transaction already finalized'); }); }); - describe('Savepoints', () => { - it('should create and rollback to savepoints', () => { - db.transaction((ctx) => { - db.createNode('Job', { title: 'Job 1' }); + describe('Savepoints', async () => { + it('should create and rollback to savepoints', async () => { + await db.transaction(async (ctx) => { + await db.createNode('Job', { title: 'Job 1' }); ctx.savepoint('sp1'); - db.createNode('Job', { title: 'Job 2' }); + await db.createNode('Job', { title: 'Job 2' }); ctx.savepoint('sp2'); - db.createNode('Job', { title: 'Job 3' }); + await db.createNode('Job', { title: 'Job 3' }); // Rollback to sp2 - Job 3 should disappear ctx.rollbackTo('sp2'); - db.createNode('Job', { title: 'Job 4' }); + await db.createNode('Job', { title: 'Job 4' }); // Rollback to sp1 - Job 2 and Job 4 should disappear ctx.rollbackTo('sp1'); - db.createNode('Job', { title: 'Job 5' }); + await db.createNode('Job', { title: 'Job 5' }); }); - const jobs = db.nodes('Job').exec(); + const jobs = await db.nodes('Job').exec(); expect(jobs).toHaveLength(2); expect(jobs.map(j => j.properties.title).sort()).toEqual(['Job 1', 'Job 5']); }); - it('should release savepoints', () => { - db.transaction((ctx) => { - db.createNode('Job', { title: 'Job 1' }); + it('should release savepoints', async () => { + await db.transaction(async (ctx) => { + await db.createNode('Job', { title: 'Job 1' }); ctx.savepoint('sp1'); - db.createNode('Job', { title: 'Job 2' }); + await db.createNode('Job', { title: 'Job 2' }); ctx.releaseSavepoint('sp1'); // Can't rollback to 
released savepoint @@ -153,36 +143,36 @@ describe('TransactionContext', () => { }).toThrow(); }); - const jobs = db.nodes('Job').exec(); + const jobs = await db.nodes('Job').exec(); expect(jobs).toHaveLength(2); }); - it('should handle nested savepoints correctly', () => { - db.transaction((ctx) => { - db.createNode('Job', { title: 'Job 1' }); + it('should handle nested savepoints correctly', async () => { + await db.transaction(async (ctx) => { + await db.createNode('Job', { title: 'Job 1' }); ctx.savepoint('outer'); - db.createNode('Job', { title: 'Job 2' }); + await db.createNode('Job', { title: 'Job 2' }); ctx.savepoint('inner'); - db.createNode('Job', { title: 'Job 3' }); + await db.createNode('Job', { title: 'Job 3' }); // Rollback inner only ctx.rollbackTo('inner'); - db.createNode('Job', { title: 'Job 4' }); + await db.createNode('Job', { title: 'Job 4' }); }); - const jobs = db.nodes('Job').exec(); + const jobs = await db.nodes('Job').exec(); expect(jobs).toHaveLength(3); expect(jobs.map(j => j.properties.title).sort()).toEqual(['Job 1', 'Job 2', 'Job 4']); }); }); - describe('Return values', () => { - it('should return value from transaction function', () => { - const result = db.transaction((ctx) => { - const job = db.createNode('Job', { title: 'Test Job' }); + describe('Return values', async () => { + it('should return value from transaction function', async () => { + const result = await db.transaction(async (ctx) => { + const job = await db.createNode('Job', { title: 'Test Job' }); return job.id; }); @@ -190,15 +180,15 @@ describe('TransactionContext', () => { expect(result).toBeGreaterThan(0); }); - it('should return complex objects from transaction', () => { + it('should return complex objects from transaction', async () => { interface Result { jobId: number; companyId: number; } - const result = db.transaction((ctx) => { - const job = db.createNode('Job', { title: 'Test Job' }); - const company = db.createNode('Company', { name: 'TestCo' }); + 
const result = await db.transaction(async (ctx) => { + const job = await db.createNode('Job', { title: 'Test Job' }); + const company = await db.createNode('Company', { name: 'TestCo' }); return { jobId: job.id, companyId: company.id }; }); @@ -209,12 +199,12 @@ describe('TransactionContext', () => { }); }); - describe('Error handling', () => { - it('should preserve original error message and stack', () => { + describe('Error handling', () => { + it('should preserve original error message and stack', async () => { const originalError = new Error('Original error'); try { - db.transaction((ctx) => { + await db.transaction(async (ctx) => { throw originalError; }); fail('Should have thrown'); @@ -224,44 +214,42 @@ describe('TransactionContext', () => { } }); - it('should handle errors after savepoint creation', () => { - expect(() => { - db.transaction((ctx) => { - db.createNode('Job', { title: 'Job 1' }); + it('should handle errors after savepoint creation', async () => { + await expect(db.transaction(async (ctx) => { + await db.createNode('Job', { title: 'Job 1' }); ctx.savepoint('sp1'); - db.createNode('Job', { title: 'Job 2' }); + await db.createNode('Job', { title: 'Job 2' }); throw new Error('Error after savepoint'); - }); - }).toThrow('Error after savepoint'); + })).rejects.toThrow('Error after savepoint'); // All changes should be rolled back - const jobs = db.nodes('Job').exec(); + const jobs = await db.nodes('Job').exec(); expect(jobs).toHaveLength(0); }); }); - describe('Edge cases', () => { - it('should handle empty transaction', () => { - const result = db.transaction((ctx) => { + describe('Edge cases', () => { + it('should handle empty transaction', async () => { + const result = await db.transaction(async (ctx) => { return 42; }); expect(result).toBe(42); }); - it('should handle transaction with only reads', () => { - db.createNode('Job', { title: 'Existing Job' }); + it('should handle transaction with only reads', async () => { + await
db.createNode('Job', { title: 'Existing Job' }); - const result = db.transaction((ctx) => { - const jobs = db.nodes('Job').exec(); + const result = await db.transaction(async (ctx) => { + const jobs = await db.nodes('Job').exec(); return jobs.length; }); expect(result).toBe(1); }); - it('should not allow savepoint with duplicate name', () => { - db.transaction((ctx) => { + it('should not allow savepoint with duplicate name', async () => { + await db.transaction(async (ctx) => { ctx.savepoint('sp1'); expect(() => { ctx.savepoint('sp1'); @@ -269,16 +257,16 @@ describe('TransactionContext', () => { }); }); - it('should throw on rollbackTo non-existent savepoint', () => { - db.transaction((ctx) => { + it('should throw on rollbackTo non-existent savepoint', async () => { + await db.transaction(async (ctx) => { expect(() => { ctx.rollbackTo('nonexistent'); }).toThrow('Savepoint nonexistent does not exist'); }); }); - it('should throw on release non-existent savepoint', () => { - db.transaction((ctx) => { + it('should throw on release non-existent savepoint', async () => { + await db.transaction(async (ctx) => { expect(() => { ctx.releaseSavepoint('nonexistent'); }).toThrow('Savepoint nonexistent does not exist'); diff --git a/tests/unit/TraversalQuery-paths.test.ts b/tests/unit/TraversalQuery-paths.test.ts index 8ed818b..33eaad3 100644 --- a/tests/unit/TraversalQuery-paths.test.ts +++ b/tests/unit/TraversalQuery-paths.test.ts @@ -1,41 +1,41 @@ import { describe, it, expect, beforeEach, afterEach } from '@jest/globals'; import { GraphDatabase } from '../../src/core/Database'; -describe('TraversalQuery - paths() wrapper', () => { +describe('TraversalQuery - paths() wrapper', () => { let db: GraphDatabase; - beforeEach(() => { + beforeEach(async () => { db = new GraphDatabase(':memory:'); // Create a simple graph: // A -> B -> C // A -> D -> C // A -> E - const a = db.createNode('Node', { name: 'A' }); - const b = db.createNode('Node', { name: 'B' }); - const c =
db.createNode('Node', { name: 'C' }); - const d = db.createNode('Node', { name: 'D' }); - const e = db.createNode('Node', { name: 'E' }); - - db.createEdge(a.id, 'LINKS', b.id); - db.createEdge(b.id, 'LINKS', c.id); - db.createEdge(a.id, 'LINKS', d.id); - db.createEdge(d.id, 'LINKS', c.id); - db.createEdge(a.id, 'LINKS', e.id); + const a = await db.createNode('Node', { name: 'A' }); + const b = await db.createNode('Node', { name: 'B' }); + const c = await db.createNode('Node', { name: 'C' }); + const d = await db.createNode('Node', { name: 'D' }); + const e = await db.createNode('Node', { name: 'E' }); + + await db.createEdge(a.id, 'LINKS', b.id); + await db.createEdge(b.id, 'LINKS', c.id); + await db.createEdge(a.id, 'LINKS', d.id); + await db.createEdge(d.id, 'LINKS', c.id); + await db.createEdge(a.id, 'LINKS', e.id); // Store node IDs for tests (db as any).testNodeIds = { a: a.id, b: b.id, c: c.id, d: d.id, e: e.id }; }); - afterEach(() => { - db.close(); + afterEach(async () => { + await db.close(); }); - it('should return all paths to target node without options', () => { + it('should return all paths to target node without options', async () => { const ids = (db as any).testNodeIds; // Find all paths from A to C - const paths = db.traverse(ids.a) + const paths = await db.traverse(ids.a) .out('LINKS') .paths(ids.c); @@ -54,11 +54,11 @@ describe('TraversalQuery - paths() wrapper', () => { }); }); - it('should limit maxPaths when option provided', () => { + it('should limit maxPaths when option provided', async () => { const ids = (db as any).testNodeIds; // Limit to 1 path - const paths = db.traverse(ids.a) + const paths = await db.traverse(ids.a) .out('LINKS') .paths(ids.c, { maxPaths: 1 }); @@ -66,33 +66,33 @@ describe('TraversalQuery - paths() wrapper', () => { expect(paths[0][paths[0].length - 1].id).toBe(ids.c); }); - it('should respect maxDepth option', () => { + it('should respect maxDepth option', async () => { const ids = (db as any).testNodeIds; // 
With maxDepth 1, should not find C (requires 2 hops) - const paths = db.traverse(ids.a) + const paths = await db.traverse(ids.a) .out('LINKS') .paths(ids.c, { maxDepth: 1 }); expect(paths.length).toBe(0); // With maxDepth 2, should find both paths to C - const paths2 = db.traverse(ids.a) + const paths2 = await db.traverse(ids.a) .out('LINKS') .paths(ids.c, { maxDepth: 2 }); expect(paths2.length).toBe(2); }); - it('should use toPaths() when no maxPaths specified', () => { + it('should use toPaths() when no maxPaths specified', async () => { const ids = (db as any).testNodeIds; // paths() without options should behave like toPaths() - const pathsResult = db.traverse(ids.a) + const pathsResult = await db.traverse(ids.a) .out('LINKS') .paths(ids.c); - const toPathsResult = db.traverse(ids.a) + const toPathsResult = await db.traverse(ids.a) .out('LINKS') .toPaths(); @@ -105,15 +105,15 @@ describe('TraversalQuery - paths() wrapper', () => { expect(pathsResult.length).toBe(toPathsFiltered.length); }); - it('should use allPaths() when maxPaths is specified', () => { + it('should use allPaths() when maxPaths is specified', async () => { const ids = (db as any).testNodeIds; // paths() with maxPaths should behave like allPaths() - const pathsResult = db.traverse(ids.a) + const pathsResult = await db.traverse(ids.a) .out('LINKS') .paths(ids.c, { maxPaths: 5 }); - const allPathsResult = db.traverse(ids.a) + const allPathsResult = await db.traverse(ids.a) .out('LINKS') .allPaths(ids.c, 5); @@ -121,11 +121,11 @@ describe('TraversalQuery - paths() wrapper', () => { expect(pathsResult.length).toBe(allPathsResult.length); }); - it('should apply maxDepth before finding paths', () => { + it('should apply maxDepth before finding paths', async () => { const ids = (db as any).testNodeIds; // Set maxDepth on traversal, then call paths() - const paths = db.traverse(ids.a) + const paths = await db.traverse(ids.a) .out('LINKS') .maxDepth(1) .paths(ids.c); @@ -134,11 +134,11 @@ 
describe('TraversalQuery - paths() wrapper', () => { expect(paths.length).toBe(0); }); - it('should override traversal maxDepth with options.maxDepth', () => { + it('should override traversal maxDepth with options.maxDepth', async () => { const ids = (db as any).testNodeIds; // Set maxDepth on traversal to 1, but override with options.maxDepth 2 - const paths = db.traverse(ids.a) + const paths = await db.traverse(ids.a) .out('LINKS') .maxDepth(1) .paths(ids.c, { maxDepth: 2 }); @@ -147,29 +147,29 @@ describe('TraversalQuery - paths() wrapper', () => { expect(paths.length).toBe(2); }); - it('should return empty array when no paths exist', () => { + it('should return empty array when no paths exist', async () => { const ids = (db as any).testNodeIds; // Create disconnected node - const isolated = db.createNode('Node', { name: 'Isolated' }); + const isolated = await db.createNode('Node', { name: 'Isolated' }); - const paths = db.traverse(ids.a) + const paths = await db.traverse(ids.a) .out('LINKS') .paths(isolated.id); expect(paths).toEqual([]); }); - it('should work with different edge types', () => { + it('should work with different edge types', async () => { // Create new graph with different edge types - const x = db.createNode('Node', { name: 'X' }); - const y = db.createNode('Node', { name: 'Y' }); - const z = db.createNode('Node', { name: 'Z' }); + const x = await db.createNode('Node', { name: 'X' }); + const y = await db.createNode('Node', { name: 'Y' }); + const z = await db.createNode('Node', { name: 'Z' }); - db.createEdge(x.id, 'TYPE_A', y.id); - db.createEdge(y.id, 'TYPE_A', z.id); + await db.createEdge(x.id, 'TYPE_A', y.id); + await db.createEdge(y.id, 'TYPE_A', z.id); - const paths = db.traverse(x.id) + const paths = await db.traverse(x.id) .out('TYPE_A') .paths(z.id); @@ -177,11 +177,11 @@ describe('TraversalQuery - paths() wrapper', () => { expect(paths[0][paths[0].length - 1].id).toBe(z.id); }); - it('should handle self-referencing paths', () => { - 
const self = db.createNode('Node', { name: 'Self' }); - db.createEdge(self.id, 'LINKS', self.id); + it('should handle self-referencing paths', async () => { + const self = await db.createNode('Node', { name: 'Self' }); + await db.createEdge(self.id, 'LINKS', self.id); - const paths = db.traverse(self.id) + const paths = await db.traverse(self.id) .out('LINKS') .paths(self.id); @@ -190,11 +190,11 @@ describe('TraversalQuery - paths() wrapper', () => { expect(paths.length).toBe(0); }); - it('should combine with other traversal methods', () => { + it('should combine with other traversal methods', async () => { const ids = (db as any).testNodeIds; // Use filter before paths() - const paths = db.traverse(ids.a) + const paths = await db.traverse(ids.a) .out('LINKS') .filter(node => node.properties.name !== 'E') .paths(ids.c); diff --git a/tests/unit/TraversalQuery.test.ts b/tests/unit/TraversalQuery.test.ts index 3768e93..48b3b06 100644 --- a/tests/unit/TraversalQuery.test.ts +++ b/tests/unit/TraversalQuery.test.ts @@ -6,7 +6,7 @@ import { Node } from '../../src/types'; * Comprehensive test suite for TraversalQuery class * Tests all traversal methods, path finding, and cycle detection */ -describe('TraversalQuery', () => { +describe('TraversalQuery', () => { let db: GraphDatabase; let testNodeIds: { a: number; @@ -33,26 +33,26 @@ describe('TraversalQuery', () => { * - D connects to G (out) * - F connects back to C (creates cycle) */ - beforeEach(() => { + beforeEach(async () => { db = new GraphDatabase(':memory:'); - const a = db.createNode('Node', { name: 'A', level: 0 }); - const b = db.createNode('Node', { name: 'B', level: 1 }); - const c = db.createNode('Node', { name: 'C', level: 1 }); - const d = db.createNode('Node', { name: 'D', level: 1 }); - const e = db.createNode('Node', { name: 'E', level: 2 }); - const f = db.createNode('Node', { name: 'F', level: 2 }); + const a = await db.createNode('Node', { name: 'A', level: 0 }); + const b = await
db.createNode('Node', { name: 'B', level: 1 }); + const c = await db.createNode('Node', { name: 'C', level: 1 }); + const d = await db.createNode('Node', { name: 'D', level: 1 }); + const e = await db.createNode('Node', { name: 'E', level: 2 }); + const f = await db.createNode('Node', { name: 'F', level: 2 }); // Create edges - db.createEdge(a.id, 'LINKS', b.id); - db.createEdge(a.id, 'LINKS', c.id); - db.createEdge(a.id, 'LINKS', d.id); - db.createEdge(b.id, 'LINKS', e.id); - db.createEdge(c.id, 'LINKS', e.id); - db.createEdge(c.id, 'LINKS', f.id); + await db.createEdge(a.id, 'LINKS', b.id); + await db.createEdge(a.id, 'LINKS', c.id); + await db.createEdge(a.id, 'LINKS', d.id); + await db.createEdge(b.id, 'LINKS', e.id); + await db.createEdge(c.id, 'LINKS', e.id); + await db.createEdge(c.id, 'LINKS', f.id); // Create cycle: F -> C - db.createEdge(f.id, 'LINKS', c.id); + await db.createEdge(f.id, 'LINKS', c.id); // Store for tests testNodeIds = { @@ -65,13 +65,13 @@ describe('TraversalQuery', () => { }; }); - afterEach(() => { - db.close(); + afterEach(async () => { + await db.close(); }); - describe('out() - Outgoing Traversal', () => { - it('should traverse outgoing edges from start node', () => { - const nodes = db.traverse(testNodeIds.a) + describe('out() - Outgoing Traversal', async () => { + it('should traverse outgoing edges from start node', async () => { + const nodes = await db.traverse(testNodeIds.a) .out('LINKS') .maxDepth(1) .toArray(); @@ -81,12 +81,12 @@ describe('TraversalQuery', () => { expect(nodeNames).toEqual(['B', 'C', 'D']); }); - it('should respect node type filter in out()', () => { + it('should respect node type filter in out()', async () => { // Add a different type node - const special = db.createNode('Special', { name: 'Special' }); - db.createEdge(testNodeIds.a, 'LINKS', special.id); + const special = await db.createNode('Special', { name: 'Special' }); + await db.createEdge(testNodeIds.a, 'LINKS', special.id); - const nodes = 
db.traverse(testNodeIds.a) + const nodes = await db.traverse(testNodeIds.a) .out('LINKS', 'Node') .maxDepth(1) .toArray(); @@ -96,8 +96,8 @@ describe('TraversalQuery', () => { expect(nodes.every(n => n.type === 'Node')).toBe(true); }); - it('should traverse multiple hops with out()', () => { - const nodes = db.traverse(testNodeIds.a) + it('should traverse multiple hops with out()', async () => { + const nodes = await db.traverse(testNodeIds.a) .out('LINKS') .maxDepth(2) .toArray(); @@ -109,10 +109,10 @@ describe('TraversalQuery', () => { expect(nodeNames).toContain('F'); }); - it('should return empty array when no outgoing edges exist', () => { - const isolated = db.createNode('Node', { name: 'Isolated' }); + it('should return empty array when no outgoing edges exist', async () => { + const isolated = await db.createNode('Node', { name: 'Isolated' }); - const nodes = db.traverse(isolated.id) + const nodes = await db.traverse(isolated.id) .out('LINKS') .toArray(); @@ -120,9 +120,9 @@ describe('TraversalQuery', () => { }); }); - describe('in() - Incoming Traversal', () => { - it('should traverse incoming edges', () => { - const nodes = db.traverse(testNodeIds.e) + describe('in() - Incoming Traversal', async () => { + it('should traverse incoming edges', async () => { + const nodes = await db.traverse(testNodeIds.e) .in('LINKS') .maxDepth(1) .toArray(); @@ -133,11 +133,11 @@ describe('TraversalQuery', () => { expect(nodeNames).toEqual(['B', 'C']); }); - it('should respect node type filter in in()', () => { - const special = db.createNode('Special', { name: 'Special' }); - db.createEdge(special.id, 'LINKS', testNodeIds.e); + it('should respect node type filter in in()', async () => { + const special = await db.createNode('Special', { name: 'Special' }); + await db.createEdge(special.id, 'LINKS', testNodeIds.e); - const nodes = db.traverse(testNodeIds.e) + const nodes = await db.traverse(testNodeIds.e) .in('LINKS', 'Node') .maxDepth(1) .toArray(); @@ -147,8 +147,8 @@ 
describe('TraversalQuery', () => { expect(nodes.every(n => n.type === 'Node')).toBe(true); }); - it('should traverse multiple hops with in()', () => { - const nodes = db.traverse(testNodeIds.e) + it('should traverse multiple hops with in()', async () => { + const nodes = await db.traverse(testNodeIds.e) .in('LINKS') .maxDepth(2) .toArray(); @@ -159,8 +159,8 @@ describe('TraversalQuery', () => { expect(nodeNames).toContain('A'); }); - it('should return empty array when no incoming edges exist', () => { - const nodes = db.traverse(testNodeIds.a) + it('should return empty array when no incoming edges exist', async () => { + const nodes = await db.traverse(testNodeIds.a) .in('LINKS') .toArray(); @@ -168,9 +168,9 @@ describe('TraversalQuery', () => { }); }); - describe('both() - Bidirectional Traversal', () => { - it('should traverse edges in both directions', () => { - const nodes = db.traverse(testNodeIds.c) + describe('both() - Bidirectional Traversal', async () => { + it('should traverse edges in both directions', async () => { + const nodes = await db.traverse(testNodeIds.c) .both('LINKS') .maxDepth(1) .toArray(); @@ -183,12 +183,12 @@ describe('TraversalQuery', () => { expect(nodeNames).toContain('F'); // both directions }); - it('should respect node type filter in both()', () => { - const special = db.createNode('Special', { name: 'Special' }); - db.createEdge(special.id, 'LINKS', testNodeIds.c); - db.createEdge(testNodeIds.c, 'LINKS', special.id); + it('should respect node type filter in both()', async () => { + const special = await db.createNode('Special', { name: 'Special' }); + await db.createEdge(special.id, 'LINKS', testNodeIds.c); + await db.createEdge(testNodeIds.c, 'LINKS', special.id); - const nodes = db.traverse(testNodeIds.c) + const nodes = await db.traverse(testNodeIds.c) .both('LINKS', 'Node') .maxDepth(1) .toArray(); @@ -196,8 +196,8 @@ describe('TraversalQuery', () => { expect(nodes.every(n => n.type === 'Node')).toBe(true); }); - it('should 
explore wider graph with both()', () => { - const nodes = db.traverse(testNodeIds.b) + it('should explore wider graph with both()', async () => { + const nodes = await db.traverse(testNodeIds.b) .both('LINKS') .maxDepth(2) .toArray(); @@ -207,14 +207,14 @@ describe('TraversalQuery', () => { }); }); - describe('maxDepth() - Depth Limiting', () => { - it('should limit traversal to maxDepth', () => { - const depth1 = db.traverse(testNodeIds.a) + describe('maxDepth() - Depth Limiting', async () => { + it('should limit traversal to maxDepth', async () => { + const depth1 = await db.traverse(testNodeIds.a) .out('LINKS') .maxDepth(1) .toArray(); - const depth2 = db.traverse(testNodeIds.a) + const depth2 = await db.traverse(testNodeIds.a) .out('LINKS') .maxDepth(2) .toArray(); @@ -228,8 +228,8 @@ describe('TraversalQuery', () => { }).toThrow('Max depth must be non-negative'); }); - it('should accept zero maxDepth', () => { - const nodes = db.traverse(testNodeIds.a) + it('should accept zero maxDepth', async () => { + const nodes = await db.traverse(testNodeIds.a) .out('LINKS') .maxDepth(0) .toArray(); @@ -238,8 +238,8 @@ describe('TraversalQuery', () => { expect(nodes).toEqual([]); }); - it('should work with large depth values', () => { - const nodes = db.traverse(testNodeIds.a) + it('should work with large depth values', async () => { + const nodes = await db.traverse(testNodeIds.a) .out('LINKS') .maxDepth(100) .toArray(); @@ -249,9 +249,9 @@ describe('TraversalQuery', () => { }); }); - describe('minDepth() - Minimum Depth', () => { - it('should skip nodes closer than minDepth', () => { - const nodes = db.traverse(testNodeIds.a) + describe('minDepth() - Minimum Depth', async () => { + it('should skip nodes closer than minDepth', async () => { + const nodes = await db.traverse(testNodeIds.a) .out('LINKS') .minDepth(2) .maxDepth(3) @@ -267,8 +267,8 @@ describe('TraversalQuery', () => { }).toThrow('Min depth must be non-negative'); }); - it('should work with minDepth equal to 
maxDepth', () => { - const nodes = db.traverse(testNodeIds.a) + it('should work with minDepth equal to maxDepth', async () => { + const nodes = await db.traverse(testNodeIds.a) .out('LINKS') .minDepth(1) .maxDepth(1) @@ -280,9 +280,9 @@ describe('TraversalQuery', () => { }); }); - describe('filter() - Predicate Filtering', () => { - it('should filter nodes by property value', () => { - const nodes = db.traverse(testNodeIds.a) + describe('filter() - Predicate Filtering', async () => { + it('should filter nodes by property value', async () => { + const nodes = await db.traverse(testNodeIds.a) .out('LINKS') .filter(node => node.properties.name === 'B') .toArray(); @@ -291,8 +291,8 @@ describe('TraversalQuery', () => { expect(nodes[0].properties.name).toBe('B'); }); - it('should chain multiple filters', () => { - const nodes = db.traverse(testNodeIds.a) + it('should chain multiple filters', async () => { + const nodes = await db.traverse(testNodeIds.a) .out('LINKS') .filter(node => node.properties.level >= 1) .filter(node => node.properties.name !== 'D') @@ -303,8 +303,8 @@ describe('TraversalQuery', () => { expect(nodes.map(n => n.properties.name).sort()).toEqual(['B', 'C']); }); - it('should apply filter across multiple depths', () => { - const nodes = db.traverse(testNodeIds.a) + it('should apply filter across multiple depths', async () => { + const nodes = await db.traverse(testNodeIds.a) .out('LINKS') .filter(node => node.properties.level === 2) .maxDepth(3) @@ -314,8 +314,8 @@ describe('TraversalQuery', () => { expect(nodes.every(n => n.properties.level === 2)).toBe(true); }); - it('should return empty when filter matches nothing', () => { - const nodes = db.traverse(testNodeIds.a) + it('should return empty when filter matches nothing', async () => { + const nodes = await db.traverse(testNodeIds.a) .out('LINKS') .filter(node => node.properties.name === 'Nonexistent') .toArray(); @@ -324,9 +324,9 @@ describe('TraversalQuery', () => { }); }); - describe('unique() - 
Node Deduplication', () => { - it('should return each node only once', () => { - const nodes = db.traverse(testNodeIds.a) + describe('unique() - Node Deduplication', async () => { + it('should return each node only once', async () => { + const nodes = await db.traverse(testNodeIds.a) .out('LINKS') .unique() .maxDepth(2) @@ -338,9 +338,9 @@ describe('TraversalQuery', () => { expect(nodeIds.length).toBe(uniqueIds.size); }); - it('should deduplicate diamond pattern traversal', () => { + it('should deduplicate diamond pattern traversal', async () => { // E is reachable via B and C - should appear once - const nodes = db.traverse(testNodeIds.a) + const nodes = await db.traverse(testNodeIds.a) .out('LINKS') .unique() .maxDepth(2) @@ -350,8 +350,8 @@ describe('TraversalQuery', () => { expect(eNodes).toHaveLength(1); }); - it('should work without unique() - allow duplicates', () => { - const nodes = db.traverse(testNodeIds.a) + it('should work without unique() - allow duplicates', async () => { + const nodes = await db.traverse(testNodeIds.a) .out('LINKS') .maxDepth(2) .toArray(); @@ -362,9 +362,9 @@ describe('TraversalQuery', () => { }); }); - describe('toArray() - Result Collection', () => { - it('should return array of nodes', () => { - const nodes = db.traverse(testNodeIds.a) + describe('toArray() - Result Collection', async () => { + it('should return array of nodes', async () => { + const nodes = await db.traverse(testNodeIds.a) .out('LINKS') .maxDepth(1) .toArray(); @@ -376,8 +376,8 @@ describe('TraversalQuery', () => { expect(nodes[0]).toHaveProperty('properties'); }); - it('should not include start node in results', () => { - const nodes = db.traverse(testNodeIds.a) + it('should not include start node in results', async () => { + const nodes = await db.traverse(testNodeIds.a) .out('LINKS') .toArray(); @@ -385,28 +385,28 @@ describe('TraversalQuery', () => { expect(hasStartNode).toBe(false); }); - it('should return empty array for isolated node', () => { - const 
isolated = db.createNode('Node', { name: 'Isolated' }); + it('should return empty array for isolated node', async () => { + const isolated = await db.createNode('Node', { name: 'Isolated' }); - const nodes = db.traverse(isolated.id) + const nodes = await db.traverse(isolated.id) .out('LINKS') .toArray(); expect(nodes).toEqual([]); }); - it('should work with no traversal steps defined', () => { + it('should work with no traversal steps defined', async () => { // Edge case: traverse without defining out/in/both - const nodes = db.traverse(testNodeIds.a).toArray(); + const nodes = await db.traverse(testNodeIds.a).toArray(); // Should return empty - no steps to follow expect(nodes).toEqual([]); }); }); - describe('shortestPath() - Path Finding', () => { - it('should find shortest path between two nodes', () => { - const path = db.traverse(testNodeIds.a) + describe('shortestPath() - Path Finding', async () => { + it('should find shortest path between two nodes', async () => { + const path = await db.traverse(testNodeIds.a) .out('LINKS') .shortestPath(testNodeIds.e); @@ -417,8 +417,8 @@ describe('TraversalQuery', () => { expect(path![path!.length - 1].id).toBe(testNodeIds.e); }); - it('should find direct path when available', () => { - const path = db.traverse(testNodeIds.a) + it('should find direct path when available', async () => { + const path = await db.traverse(testNodeIds.a) .out('LINKS') .shortestPath(testNodeIds.b); @@ -426,19 +426,19 @@ describe('TraversalQuery', () => { expect(path).toHaveLength(2); // A -> B }); - it('should return null when no path exists', () => { - const isolated = db.createNode('Node', { name: 'Isolated' }); + it('should return null when no path exists', async () => { + const isolated = await db.createNode('Node', { name: 'Isolated' }); - const path = db.traverse(testNodeIds.a) + const path = await db.traverse(testNodeIds.a) .out('LINKS') .shortestPath(isolated.id); expect(path).toBeNull(); }); - it('should prefer shorter path in diamond 
pattern', () => { + it('should prefer shorter path in diamond pattern', async () => { // Both A->B->E and A->C->E exist, both length 3 - const path = db.traverse(testNodeIds.a) + const path = await db.traverse(testNodeIds.a) .out('LINKS') .shortestPath(testNodeIds.e); @@ -446,8 +446,8 @@ describe('TraversalQuery', () => { expect(path).toHaveLength(3); // A -> (B or C) -> E }); - it('should work with both() direction', () => { - const path = db.traverse(testNodeIds.b) + it('should work with both() direction', async () => { + const path = await db.traverse(testNodeIds.b) .both('LINKS') .shortestPath(testNodeIds.f); @@ -456,8 +456,8 @@ describe('TraversalQuery', () => { expect(path!.length).toBeGreaterThanOrEqual(3); }); - it('should return path with start node as first element', () => { - const path = db.traverse(testNodeIds.a) + it('should return path with start node as first element', async () => { + const path = await db.traverse(testNodeIds.a) .out('LINKS') .shortestPath(testNodeIds.e); @@ -466,9 +466,9 @@ describe('TraversalQuery', () => { }); }); - describe('toPaths() - All Paths with Cycle Detection', () => { - it('should return all paths from traversal', () => { - const paths = db.traverse(testNodeIds.a) + describe('toPaths() - All Paths with Cycle Detection', async () => { + it('should return all paths from traversal', async () => { + const paths = await db.traverse(testNodeIds.a) .out('LINKS') .maxDepth(2) .toPaths(); @@ -478,9 +478,9 @@ describe('TraversalQuery', () => { expect(paths[0]).toBeInstanceOf(Array); }); - it('should detect and prevent cycles', () => { + it('should detect and prevent cycles', async () => { // F -> C -> F creates a cycle - const paths = db.traverse(testNodeIds.c) + const paths = await db.traverse(testNodeIds.c) .both('LINKS') .maxDepth(5) .toPaths(); @@ -496,11 +496,11 @@ describe('TraversalQuery', () => { } }); - it('should handle self-referencing nodes', () => { - const self = db.createNode('Node', { name: 'Self' }); - 
db.createEdge(self.id, 'LINKS', self.id); + it('should handle self-referencing nodes', async () => { + const self = await db.createNode('Node', { name: 'Self' }); + await db.createEdge(self.id, 'LINKS', self.id); - const paths = db.traverse(self.id) + const paths = await db.traverse(self.id) .out('LINKS') .maxDepth(3) .toPaths(); @@ -509,8 +509,8 @@ describe('TraversalQuery', () => { expect(paths).toBeDefined(); }); - it('should include all intermediate nodes in paths', () => { - const paths = db.traverse(testNodeIds.a) + it('should include all intermediate nodes in paths', async () => { + const paths = await db.traverse(testNodeIds.a) .out('LINKS') .maxDepth(2) .toPaths(); @@ -522,13 +522,13 @@ describe('TraversalQuery', () => { } }); - it('should respect maxDepth in toPaths', () => { - const paths1 = db.traverse(testNodeIds.a) + it('should respect maxDepth in toPaths', async () => { + const paths1 = await db.traverse(testNodeIds.a) .out('LINKS') .maxDepth(1) .toPaths(); - const paths2 = db.traverse(testNodeIds.a) + const paths2 = await db.traverse(testNodeIds.a) .out('LINKS') .maxDepth(2) .toPaths(); @@ -536,8 +536,8 @@ describe('TraversalQuery', () => { expect(paths2.length).toBeGreaterThan(paths1.length); }); - it('should apply filters to toPaths results', () => { - const paths = db.traverse(testNodeIds.a) + it('should apply filters to toPaths results', async () => { + const paths = await db.traverse(testNodeIds.a) .out('LINKS') .filter(node => node.properties.level === 1) .maxDepth(2) @@ -550,9 +550,9 @@ describe('TraversalQuery', () => { }); }); - describe('allPaths() - Limited Path Finding', () => { - it('should find multiple paths to target', () => { - const paths = db.traverse(testNodeIds.a) + describe('allPaths() - Limited Path Finding', async () => { + it('should find multiple paths to target', async () => { + const paths = await db.traverse(testNodeIds.a) .out('LINKS') .allPaths(testNodeIds.e, 10); @@ -565,17 +565,17 @@ describe('TraversalQuery', () => 
{ } }); - it('should respect maxPaths limit', () => { - const paths = db.traverse(testNodeIds.a) + it('should respect maxPaths limit', async () => { + const paths = await db.traverse(testNodeIds.a) .out('LINKS') .allPaths(testNodeIds.e, 2); expect(paths.length).toBeLessThanOrEqual(2); }); - it('should find different paths in diamond pattern', () => { + it('should find different paths in diamond pattern', async () => { // A can reach E via B or C - const paths = db.traverse(testNodeIds.a) + const paths = await db.traverse(testNodeIds.a) .out('LINKS') .allPaths(testNodeIds.e, 5); @@ -589,18 +589,18 @@ describe('TraversalQuery', () => { expect(uniquePaths.size).toBeGreaterThan(1); }); - it('should return empty array when no paths exist', () => { - const isolated = db.createNode('Node', { name: 'Isolated' }); + it('should return empty array when no paths exist', async () => { + const isolated = await db.createNode('Node', { name: 'Isolated' }); - const paths = db.traverse(testNodeIds.a) + const paths = await db.traverse(testNodeIds.a) .out('LINKS') .allPaths(isolated.id); expect(paths).toEqual([]); }); - it('should respect maxDepth in allPaths', () => { - const paths = db.traverse(testNodeIds.a) + it('should respect maxDepth in allPaths', async () => { + const paths = await db.traverse(testNodeIds.a) .out('LINKS') .maxDepth(1) .allPaths(testNodeIds.e); @@ -610,9 +610,9 @@ describe('TraversalQuery', () => { }); }); - describe('paths() - Unified Path Wrapper', () => { - it('should find paths to target without options', () => { - const paths = db.traverse(testNodeIds.a) + describe('paths() - Unified Path Wrapper', async () => { + it('should find paths to target without options', async () => { + const paths = await db.traverse(testNodeIds.a) .out('LINKS') .paths(testNodeIds.e); @@ -624,29 +624,29 @@ describe('TraversalQuery', () => { } }); - it('should limit results with maxPaths option', () => { - const paths = db.traverse(testNodeIds.a) + it('should limit results with 
maxPaths option', async () => { + const paths = await db.traverse(testNodeIds.a) .out('LINKS') .paths(testNodeIds.e, { maxPaths: 1 }); expect(paths.length).toBeLessThanOrEqual(1); }); - it('should respect maxDepth option', () => { - const paths = db.traverse(testNodeIds.a) + it('should respect maxDepth option', async () => { + const paths = await db.traverse(testNodeIds.a) .out('LINKS') .paths(testNodeIds.e, { maxDepth: 1 }); expect(paths).toEqual([]); }); - it('should use toPaths internally when no maxPaths', () => { - const pathsResult = db.traverse(testNodeIds.a) + it('should use toPaths internally when no maxPaths', async () => { + const pathsResult = await db.traverse(testNodeIds.a) .out('LINKS') .maxDepth(2) .paths(testNodeIds.e); - const toPathsResult = db.traverse(testNodeIds.a) + const toPathsResult = await db.traverse(testNodeIds.a) .out('LINKS') .maxDepth(2) .toPaths() @@ -655,12 +655,12 @@ describe('TraversalQuery', () => { expect(pathsResult.length).toBe(toPathsResult.length); }); - it('should use allPaths internally when maxPaths specified', () => { - const pathsResult = db.traverse(testNodeIds.a) + it('should use allPaths internally when maxPaths specified', async () => { + const pathsResult = await db.traverse(testNodeIds.a) .out('LINKS') .paths(testNodeIds.e, { maxPaths: 3 }); - const allPathsResult = db.traverse(testNodeIds.a) + const allPathsResult = await db.traverse(testNodeIds.a) .out('LINKS') .allPaths(testNodeIds.e, 3); @@ -668,9 +668,9 @@ describe('TraversalQuery', () => { }); }); - describe('Complex Multi-Hop Traversals', () => { - it('should handle deep traversals', () => { - const nodes = db.traverse(testNodeIds.a) + describe('Complex Multi-Hop Traversals', async () => { + it('should handle deep traversals', async () => { + const nodes = await db.traverse(testNodeIds.a) .out('LINKS') .maxDepth(10) .toArray(); @@ -678,8 +678,8 @@ describe('TraversalQuery', () => { expect(nodes.length).toBeGreaterThan(0); }); - it('should combine multiple 
traversal methods', () => { - const nodes = db.traverse(testNodeIds.a) + it('should combine multiple traversal methods', async () => { + const nodes = await db.traverse(testNodeIds.a) .out('LINKS') .maxDepth(2) .minDepth(1) @@ -691,9 +691,9 @@ describe('TraversalQuery', () => { expect(nodes.every(n => n.properties.level >= 1)).toBe(true); }); - it('should traverse mixed edge directions', () => { + it('should traverse mixed edge directions', async () => { // Start from middle node, go both ways - const nodes = db.traverse(testNodeIds.c) + const nodes = await db.traverse(testNodeIds.c) .both('LINKS') .maxDepth(2) .toArray(); @@ -701,16 +701,16 @@ describe('TraversalQuery', () => { expect(nodes.length).toBeGreaterThan(3); }); - it('should handle graphs with multiple edge types', () => { + it('should handle graphs with multiple edge types', async () => { // Add different edge type - db.createEdge(testNodeIds.a, 'SPECIAL', testNodeIds.b); + await db.createEdge(testNodeIds.a, 'SPECIAL', testNodeIds.b); - const linkNodes = db.traverse(testNodeIds.a) + const linkNodes = await db.traverse(testNodeIds.a) .out('LINKS') .maxDepth(1) .toArray(); - const specialNodes = db.traverse(testNodeIds.a) + const specialNodes = await db.traverse(testNodeIds.a) .out('SPECIAL') .maxDepth(1) .toArray(); @@ -719,8 +719,8 @@ describe('TraversalQuery', () => { expect(specialNodes.length).toBe(1); }); - it('should handle very large depth limits gracefully', () => { - const nodes = db.traverse(testNodeIds.a) + it('should handle very large depth limits gracefully', async () => { + const nodes = await db.traverse(testNodeIds.a) .out('LINKS') .maxDepth(1000) .toArray(); @@ -731,29 +731,29 @@ describe('TraversalQuery', () => { }); }); - describe('Edge Cases and Error Handling', () => { - it('should handle traversal from non-existent node gracefully', () => { - const nodes = db.traverse(99999) + describe('Edge Cases and Error Handling', async () => { + it('should handle traversal from non-existent 
node gracefully', async () => { + const nodes = await db.traverse(99999) .out('LINKS') .toArray(); expect(nodes).toEqual([]); }); - it('should handle empty graph', () => { + it('should handle empty graph', async () => { const emptyDb = new GraphDatabase(':memory:'); - const node = emptyDb.createNode('Node', { name: 'Only' }); + const node = await emptyDb.createNode('Node', { name: 'Only' }); const nodes = emptyDb.traverse(node.id) .out('LINKS') .toArray(); expect(nodes).toEqual([]); - emptyDb.close(); + await emptyDb.close(); }); - it('should handle traversal with non-existent edge type', () => { - const nodes = db.traverse(testNodeIds.a) + it('should handle traversal with non-existent edge type', async () => { + const nodes = await db.traverse(testNodeIds.a) .out('NONEXISTENT') .toArray(); @@ -770,12 +770,12 @@ describe('TraversalQuery', () => { expect(query2).toBe(query3); }); - it('should handle concurrent traversals', () => { - const nodes1 = db.traverse(testNodeIds.a) + it('should handle concurrent traversals', async () => { + const nodes1 = await db.traverse(testNodeIds.a) .out('LINKS') .toArray(); - const nodes2 = db.traverse(testNodeIds.b) + const nodes2 = await db.traverse(testNodeIds.b) .out('LINKS') .toArray(); @@ -785,18 +785,18 @@ describe('TraversalQuery', () => { }); }); - describe('Performance and Memory', () => { - it('should handle large result sets', () => { + describe('Performance and Memory', async () => { + it('should handle large result sets', async () => { // Create a larger graph - const root = db.createNode('Node', { name: 'Root' }); + const root = await db.createNode('Node', { name: 'Root' }); // Create 50 child nodes for (let i = 0; i < 50; i++) { - const child = db.createNode('Node', { name: `Child${i}` }); - db.createEdge(root.id, 'LINKS', child.id); + const child = await db.createNode('Node', { name: `Child${i}` }); + await db.createEdge(root.id, 'LINKS', child.id); } - const nodes = db.traverse(root.id) + const nodes = await 
db.traverse(root.id) .out('LINKS') .maxDepth(1) .toArray(); @@ -804,18 +804,18 @@ describe('TraversalQuery', () => { expect(nodes).toHaveLength(50); }); - it('should not leak memory with cycles', () => { + it('should not leak memory with cycles', async () => { // Create circular graph: X -> Y -> Z -> X - const x = db.createNode('Node', { name: 'X' }); - const y = db.createNode('Node', { name: 'Y' }); - const z = db.createNode('Node', { name: 'Z' }); + const x = await db.createNode('Node', { name: 'X' }); + const y = await db.createNode('Node', { name: 'Y' }); + const z = await db.createNode('Node', { name: 'Z' }); - db.createEdge(x.id, 'LINKS', y.id); - db.createEdge(y.id, 'LINKS', z.id); - db.createEdge(z.id, 'LINKS', x.id); + await db.createEdge(x.id, 'LINKS', y.id); + await db.createEdge(y.id, 'LINKS', z.id); + await db.createEdge(z.id, 'LINKS', x.id); // Should complete without hanging - const paths = db.traverse(x.id) + const paths = await db.traverse(x.id) .out('LINKS') .maxDepth(10) .toPaths(); @@ -823,10 +823,10 @@ describe('TraversalQuery', () => { expect(paths).toBeDefined(); }); - it('should handle deep path exploration efficiently', () => { + it('should handle deep path exploration efficiently', async () => { const start = Date.now(); - db.traverse(testNodeIds.a) + await db.traverse(testNodeIds.a) .out('LINKS') .maxDepth(5) .toPaths(); diff --git a/tests/unit/concurrency.test.ts b/tests/unit/concurrency.test.ts index 32102ab..763ed07 100644 --- a/tests/unit/concurrency.test.ts +++ b/tests/unit/concurrency.test.ts @@ -10,8 +10,8 @@ import { GraphDatabase } from '../../src/core/Database'; import { enableWAL, withRetry, WriteQueue } from '../../src/utils/concurrency'; -describe('Concurrency Utilities', () => { - describe('enableWAL()', () => { +describe('Concurrency Utilities', async () => { + describe('enableWAL()', async () => { let db: GraphDatabase; let dbPath: string; @@ -21,8 +21,8 @@ describe('Concurrency Utilities', () => { db = new 
GraphDatabase(dbPath); }); - afterEach(() => { - db.close(); + afterEach(async () => { + await db.close(); // Clean up temp file try { require('fs').unlinkSync(dbPath); @@ -91,7 +91,7 @@ describe('Concurrency Utilities', () => { }); }); - describe('withRetry()', () => { + describe('withRetry()', async () => { it('should execute operation successfully on first try', async () => { const operation = jest.fn(() => 'success'); @@ -226,7 +226,7 @@ describe('Concurrency Utilities', () => { }); }); - describe('WriteQueue', () => { + describe('WriteQueue', async () => { let queue: WriteQueue; let db: GraphDatabase; @@ -235,8 +235,8 @@ describe('Concurrency Utilities', () => { db = new GraphDatabase(':memory:'); }); - afterEach(() => { - db.close(); + afterEach(async () => { + await db.close(); }); it('should execute single operation', async () => { @@ -249,10 +249,10 @@ describe('Concurrency Utilities', () => { }); it('should return operation result', async () => { - const node = db.createNode('Test', { value: 42 }); + const node = await db.createNode('Test', { value: 42 }); - const result = await queue.enqueue(() => { - return db.getNode(node.id); + const result = await queue.enqueue(async () => { + return await db.getNode(node.id); }); expect(result?.id).toBe(node.id); @@ -322,7 +322,7 @@ describe('Concurrency Utilities', () => { enableWAL(db); const writes = Array.from({ length: 100 }, (_, i) => - queue.enqueue(() => db.createNode('Job', { index: i })) + queue.enqueue(async () => await db.createNode('Job', { index: i })) ); const nodes = await Promise.all(writes); @@ -331,7 +331,7 @@ describe('Concurrency Utilities', () => { expect(new Set(nodes.map(n => n.id)).size).toBe(100); // All unique IDs // Verify all nodes were created - const allNodes = db.nodes('Job').exec(); + const allNodes = await db.nodes('Job').exec(); expect(allNodes).toHaveLength(100); }); @@ -416,7 +416,7 @@ describe('Concurrency Utilities', () => { }); }); - describe('Integration: Full concurrency 
stack', () => { + describe('Integration: Full concurrency stack', async () => { let db: GraphDatabase; let queue: WriteQueue; @@ -426,16 +426,15 @@ describe('Concurrency Utilities', () => { queue = new WriteQueue(); }); - afterEach(() => { - db.close(); + afterEach(async () => { + await db.close(); }); it('should combine WAL + retry + queue for safe concurrent writes', async () => { // Simulate high-concurrency scenario const writes = Array.from({ length: 50 }, (_, i) => queue.enqueue(() => - withRetry(() => - db.mergeNode('Job', { url: `https://example.com/job/${i}` } as any, { title: `Job ${i}` } as any) + withRetry(async () => await db.mergeNode('Job', { url: `https://example.com/job/${i}` } as any, { title: `Job ${i}` } as any) ) ) ); @@ -446,21 +445,20 @@ describe('Concurrency Utilities', () => { expect(results.every(r => r.created)).toBe(true); // Verify all nodes exist - const nodes = db.nodes('Job').exec(); + const nodes = await db.nodes('Job').exec(); expect(nodes).toHaveLength(50); }); it('should handle merge conflicts gracefully', async () => { - db.createPropertyIndex('Job', 'url'); + await db.createPropertyIndex('Job', 'url'); // Create initial node - db.createNode('Job', { url: 'https://example.com/job/1', title: 'Original' }); + await db.createNode('Job', { url: 'https://example.com/job/1', title: 'Original' }); // Multiple concurrent merges of same node const merges = Array.from({ length: 10 }, (_, i) => queue.enqueue(() => - withRetry(() => - db.mergeNode( + withRetry(async () => await db.mergeNode( 'Job', { url: 'https://example.com/job/1' } as any, undefined, @@ -477,22 +475,20 @@ describe('Concurrency Utilities', () => { expect(results.every(r => r.node.id === results[0].node.id)).toBe(true); // Final viewCount should be from last merge - const final = db.getNode(results[0].node.id); + const final = await db.getNode(results[0].node.id); expect((final?.properties as any).viewCount).toBe(9); }); it('should maintain data consistency under load', 
async () => { // Create nodes with edges in high-concurrency scenario - const companyNode = db.createNode('Company', { name: 'TechCorp' }); + const companyNode = await db.createNode('Company', { name: 'TechCorp' }); const operations = Array.from({ length: 100 }, (_, i) => queue.enqueue(async () => { - const job = await withRetry(() => - db.createNode('Job', { title: `Job ${i}` }) + const job = await withRetry(async () => await db.createNode('Job', { title: `Job ${i}` }) ); - await withRetry(() => - db.createEdge(job.id, 'POSTED_BY', companyNode.id) + await withRetry(async () => await db.createEdge(job.id, 'POSTED_BY', companyNode.id) ); return job; @@ -504,7 +500,7 @@ describe('Concurrency Utilities', () => { // Verify data integrity expect(jobs).toHaveLength(100); - const allJobs = db.nodes('Job').exec(); + const allJobs = await db.nodes('Job').exec(); expect(allJobs).toHaveLength(100); // Count edges using SQL From cda21d809a29939eabc500731d9f020cee17d860 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Wed, 1 Apr 2026 18:34:44 +0000 Subject: [PATCH 3/3] fix: convert Database API to async, fix TypeScript errors in tests Agent-Logs-Url: https://github.com/michaeloboyle/sqlite-graph/sessions/c2e30f86-7c89-4c8c-a70d-c9ff39267e19 Co-authored-by: michaeloboyle <61171+michaeloboyle@users.noreply.github.com> --- .../transform_tests_v2.cpython-312.pyc | Bin 0 -> 34020 bytes scripts/transform_tests.py | 717 ++++++++++++++++++ src/core/Database.ts | 101 ++- src/index.ts | 10 +- src/query/NodeQuery.ts | 12 +- src/query/TraversalQuery.ts | 16 +- tests/integration/graph-operations.test.ts | 4 +- tests/integration/job-pipeline.test.ts | 12 +- 8 files changed, 812 insertions(+), 60 deletions(-) create mode 100644 scripts/__pycache__/transform_tests_v2.cpython-312.pyc create mode 100644 scripts/transform_tests.py diff --git a/scripts/__pycache__/transform_tests_v2.cpython-312.pyc 
b/scripts/__pycache__/transform_tests_v2.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c9e1494e1d873a9c0c64fc71f60974a3b3fca895 GIT binary patch literal 34020 zcmeHwdr(_xmY=TP?*|AF;w2si1U58&;|Fex4aN`rY(H_+f-CbD?km~Yvgo0(~P z>Rw%eB=EGmcQzT>e7gGXcfa@Jobx;9-2b4{sYUQPLjGmH%BkBzN0oF@Z_;ycxL{UG8r0^$!znAYpEX-44Ozb1Z zB&2vuN=n9LK8Ytjk|L2$#EsV=q@5(FN}tFlLGxxz?vqhC{UqU%`{bk)&o!A(N=ZoB zn9TLR;7aL}``GyZfpT}ffOK-4?zm4vD!hrz>OV)*bD2=bh?>vQjakVuQMOD}Efeo76StO$vCsbx zd}fK5WXS27Rm2E)Oop?I$w=o_SRF(^ELm5V+e5`9PIpg?=y59`9~=8wZm;hf8FtoF z*IX_qRX;jH4c5Eg9qRWuH^GK-wQt3b9;*MU$6fE?HmfeQ2;E)RH`iUg8Oy!`jqBnX z*VRoqdYsf&C<_hu%RA7JanYhyH-0dx)QvYsb*AywsM;{z5>;z%_e_?4aOH!Vs8;89 zOdRkN&o%mJR^Iz9QJsFW;{6LV+oM^z6D`km*-?XWvi1F2(fpz*Il5W?{`o*@z%jGo zd*wmL!^(eN7djbwcg{5Ho;^F~{{H2~tn6?11WNB-`BqKP6l{244w?U?FlsbSo}7|T z5=#LyJSt^Vd{R`#T0YQuNR1CD!@->M6hhMH$9=E}ccJ6}5A zJn#6p<>SgHE&paya;d4Fc%1aak2gVQ#YI3 z)GoO5Cj3!|8l<*EkhE?PHH@hY5-z5Jl1R)clJEgt>;+1iqY-_C70wY4N{>Gt8Fw|n zQPo@oe3KF%PhE2K$!U>Y>TPQ2bM_3{`mfloj=1{~?T0RG{Vp5q_2_hQ+Z;nc0g^Xu zeGa(Bgk!f8NJpow-&2Q=na>SSf>+bojqi54Ybs`?G0CvwMoij0GBOksQ_h&&?Q~E* zeRc`85faBFPzVH+(~TMcrwH3?SuUrOba&x=y4X^v1CXE>{@e`^jEk1!Xaj1QBqp~6 zvZi)2+QPtTT3tHcvY;`1TRg3rQU&Tl<&3e8(bUt@`V77Nr;y)Q-^(`$cL^a{_c%#Y zj+4yWyLz1Z-8QnHa`t$JZo+uE`$?w_sB8FLT~}9U2TI@O>2t=D*c?>vwPB~rW2<(J zc(~*an+r}}Ckc0{kx|_155K7v2$Bamr{CSh*0+nzu&W1%ZmJy;x=`Xa2;huYo4#E>=?G*8s2K~B zRTkVrtE$N&vuBvu;i9E3cJz7L z95PnIA6{dN(l{kgcp!@X91u;4FFO!@mKd)ANVoDs0`-q5aZ-ZmaefV^T6r*@@$&H# zQWSDAW5Cq{2LZIdPv%i^rL5o}#FX488PITd6W>6e%DCMCcni5Z*C!nSDstrqr6c85 zzElFZ64!zUEn576`|<%3_bc)JIDW~M;kJ6Rxx4FqhT4+8PEcwNmtW$WD^*zE%qhoY zw}Y8e04)wX=a|9+v^V>K`YX70jVXN~Il$ETxs0{V&__wNO*smr3hIbx*cBgA#?1!d z%plO1WaynS8E^vy&T26V^NUKdMLC2QBJ?(yL zty#d;4r-n#VQ$PC$t##kb|8aRAXLD?UCcH;cw# z54%lDfqH=R-Q?ZZOv3gqJhjR0x?=~=96ElW8qNsdBN6j;U`1Sy=Y|b!%I>=5H>&sT zBD;NTDxb?adZ63q?&)(5JK(}OeAROkz9GGj?A{E2TkKv*XLq$%V(+Z;%JA92vZ5?O zg~+C5Ia{C4(Z0DX?X2q@^{P==i<5tjD6eHjHJgZPgkLcc2%?P5r!RE0bRF2=#a90` 
z^(MLnr_JzHkaK~k$KB%qK~W9FQqAa=+;R1)(?wzmg4%?l(HR2_EX3_XSF&DeA*<8%dVjg;b%v>2f7g5GVJv9jgT<~h=)8*$`z9X&2nwXL)D{H#BfZ5Nju<- zf%%T9*dI_pOy;-(>0>I?jlkgE@VL|a$9xiHsdMI3U zhRJ%%uZWtm{c^++6H&bZPSoDJ8yS6Z)R;G|pV9|LLj+^2@i#xuHb;%Q)4C~Lpf~6U zb_WI{#+v8GoCQN^uqxE>;f~oIj~gR~ZIQ-~r=#@w3(wAXh0k{}=N*y8Zoz{g;zQZ2 z?6HC_ej{RRh?;Yz4^ADt+crr=jh01g!Tk+08}3yFn`dhMM;0u#p|-h8jHTJ%9yM43 z```aYaz=_-#bXs+yg6dr!rkZ#nLf;$&6_hlwuXzhM2uSnB|L``%msn=pkwApNHteF ztD((X{RbBeraLz#ZUhv;oQR?7&&5Adex#f)da9sLT%a#r3ZJ+{cRJ}Sec{eN`cQx5 zje*6&@?i6$wg+tw+e7{H#OxqlxNB0yRRH=1nwX+y{UJNlXs#z*+!!(Likgdp#MH4V zVlmODsh`W#qNPTW!N`tSL|?qLOO%~6T{Tq|sJvVMzGA6`fQhiwN{F=C;~lH%9K;P^ zI%hvnAJYO(VgP2;$`48Ky9e+Y13V{@NSB~VknD=O4ipgPDF%=fXvHUSf?p>nM?4)* z-6Rr|{GMe6Sw7mked1JfP9j}~G;*#58PX_{(xB2Z)Si@BMJO+j&Y_)D>D_nCG3d16 z_T=bbKd)hi>r3;%F)4uV; zQA7SSLrK_B5|lB9%JDdnsQSZ=vl}1R(dCU{b7Rz)L+4gc-k5%G>b;u1WH?3*5%8oKM69HEVsUnnG|swI&`RlTGXX-(rtS1$yfsn)`@o(4{G44@?- z@Z&_eNR)Cx@I*TZ!o>;JI?>$G|{+; z38cB$Rpvn_1^&SOl*A;r#5hun`d5&SC+NASfzgOxl2S|n%RD>@3ppn-52OqO0GDxz z8FKOgBLReTf)U^s6_C+s+nBj_!uF@hLkabgBkCC~$wY5~XdN?DAZFj3=1J(`S13JW zFODG*Jk-R)6Xqb6$v8fwJV|7eF)b$Dc?Lx=KU0Zw6bT}p>kUj|XBfG4%>kg#sI6a9 zPyX4LnrmumZ^nA2V1&8Vu=ac-fhEKTNy=I0+Vd5btjbh!V!o%}l`xxdlvJl;Hhug- z4@a8Dd-Mzk!PtN+5S#|pf};$9#+ZLtk%T>;5(zPx@7>VZO?Gz*^MW%DScpBB-fp^F z(`4U=Y5a}^H}Lr$w5rRKMN9C&3h9!4UxnAOq6Xfalog{+LXLKw;I!mf0_rWeCEy3} z3Azi#4Wgh41^ZBN0fIOyKwU-=nA}%;Zg2-H^)`y_Lcu#I=z}1p!sl-nFiBYK71Je& zxl%I#Ik?*qW-mOBsdpj2AHbh`JLUiubq2t>l&bM&{v$Rl(9=S)ewet$eOEeWt-_9*sU2eRwmXX;=qs zZYc>8k7N&I4;3L5ZP`32TPWN;e~>BMzmhr@G5~S%w?9-hXAjrx2^a5)826^3j)l)9 z7>DOtGaUPdT!gO$PToH^bMD@SAT`s;KczjNyTg_#R<)v>2;rsyPqX?<#+ma+h;`Yyd znmA>@HN$*Y2-j2UbRfP4kvZgs$Dd`5xe)+ibvz!FU^b_I9-fwFp6f|Lj{@jJT5mT#H7cNHl`G5W5#5x#-=2GX3{dZ&{vifqx*><#13)=sgkS~ns5>Se2H1fet}d5j*csDfkb%V>kYa`K zH+o11I(3erE)JK!gtFGTo$#9kj03?LF?~EgUT%2ZL9v;u+=KmByWpqWjieI-1Vebq zq8XM1GlQCldjZ#-nt@0E8vfjW4g~19=$DE-pv{eWe*NDmMdrM3zsF=(jvq~MISc08 z>BCcp>5^@XdHeXWWG*NFnbsE8+Jbo?Jk)MnKbHd~>auxeC>^aI#T*B)4ZTyncl-Ss 
zz)tQQoH+PF+oWUSh@W6N2oq!8;#Wo2&q1)c>T+4qJ!-M~b&DJx^NGF~adnm{OQ1G5 z8hSTG1#dB?E&lyb^n+91&YnCqeSYeEz;X9d#8etO8P?YMn->8ZXbC9p9{t2%3$}%t z9~_=5|D<9YNL>F;E&^}}WmX^BG1oA+IkcNG?eOn^o}B}C7p;Z&t7oc%4fg$bD#;=?pc8i#GW?7K+}O%bTxbirW1hi-w|L)+dH?^qj%^9aq?}F>0%u)GZo| zgQl>t%-_6J&tnvTDlKgmY4Zd7KhYEm*`Xt}^^J(Ofs3jRo(o+92*wm{JWOvm61E`CKPNIeF7c^8pDQ)^=G5yRzeYItH$d=m+pyZ z_eS|!meBqW56vE$JNfuXxO7KEyEAIcnwC$=?4;hcTt4merDVo}2t8P)S0FDM4HD=Me$46{tLqN!FmP5?rJd6wrBz`6{{l zuR-prf$e0)2dGR5N9`b`8j!n-Q_@pN<$5I-`Bwsp3Z&!fq^Q#74>@Gu0g`Bnj48(? zz(pp@oHN#q<7%EX71x}C^ZC+TvntLt zkE~HcHS~2dU(a&Qr-9;4RWBa4T_@L!@su$`(%2?*&Ft6`E~`)PBLv)Xd;~Ib%TIuh zW#t2Z0PWDQqXyp^Gof6dWpjqvmCSFaTvMbDYJm7{TyK`&CiTf&gsbjL zmSI?32Df8lo}TS93GnnO>j+OvGOxu~+2zvj_n4>ubsEJd{=~rQPCHg{IskE8cT#T0 zP#SgzQQdAe!QXHj_nhpl(sSb2?0tJmI z;5i_>f%<)ji0RLqYCe1L)amAcOi(Wu^JPm;oVsfA*A`)>s6G0ynJ9d5Z6Ne zF+6n~WW#PTru;w4&RtFI13l!O^3r8HnY>0?YECBi3!iDr!`gD7a!)ea`t_6h9Knf@ zkIcmWI^YG3?oQ)GBb~E-UJ}vlS!dRZxp}Xbi$FL8C;nf>LoB>L9)e>>UNZ*~7z!I| zR&o%vNgPBS$3Xx+Y%L7}Vf&zDu4w+3W`}u z{Si>isH7Diw1;zN7Edwrbax^#N=HhC`}w5QH3+dO(HssdK~EsF*HpARmj=^wDbV@^ zqLfOrC80Kr)VHaBpSg@UO{hxJf8o`jL308qvp9|-PNS!TPP!Bv2iFQ=&ty&s^!%sL zZ*iK?r-mLv=BsMsI$T`KfeJ-tY4{_UC#jGMphrAsDG+?Zt$78I*;Y}a$dQl3E%;8 z)|#pDy&*SgF7fUg#`1i5@x9?y+xZmQnahs1FArL4@hJfbY8A9O)tDLMV}?gA(d%RR zzWi5gzvQ;cm!D|`j#+)0ft0JvXXX370MeAkr<>r5Tg79AYqTdT%`DH(Jj?OCUD*dk zz9Q&Bb=q1NWoj+oqGG{FYkb9ge2K3l&iy_2X?z*;f|PN-Y|KWQeKz1TWn>QiCR8F% zIoDAz1NhUG5|g=rRp*VB`bzJKNQ=(~VZP4-q4m4+v9b)SDC0hww1=lY2gj zpm$ldr`PF%mQYRJ8fYQgVZaw6Gu18+>K1q1o}m%IuiQ;_eA?$wcQ+D%{f+2c2;}y;&He@R#%_B zVMbqd5X6Usja6d{C!B>h+b4L4&Cc#sRwJnZr24A+l=>bti~1K3*u~VJ!dHyAA?!{n z1aYs@kqQ_KK}R+^o;LiX?5FmR?N6G2x-s17_%$_)O3?@!O*tV3Bez(B;Z?+Ew>Jxn zgPjO~b@MYC3p(ss>A_R1UwIA{p#&V#J=zaLj(mBrl8jC02G}fsTOVwXh0#>%6uOU; zFp&!q@Dzg8HSOgr#%e-F5nUs$p_{8=RlJbmU@9xMz-G+=l_F0mP7>p%DzIW_or=UX z*tZC0xyAl~TC%E~8a4;kJ&`3oVOfL~$FW8gF=PA=8;7t~m7^QMz{uSH9n=T|-VNfl zMHXcSR!J1q1K*0}Bq9Xk5R(Ej>w%#`izHr}iT%M=gS9v_2%~WMYy{YQ$FywG7#tps 
zW#e!B6qsMbf7Vn4nWy0lV|un8xYMsXNiZWp_i*RM^6)iVFd)o{407>}E7u?;l%9y! zgT))TZUN(RY*_+zK{~`;VDW#Y|3*v+h(5s4pi_Vj7*>vyF=T~IdANVDu58j4Lj5J= z^2cb?Sb&XTT=ZWpJ85aol3JuU#BUQ6VUkxIaNKv!IPdia`smy$zh+6x>z4vh3|gxQgKWNg_?fvX44S4A#=H?}hnm-+ zUz=npmRc7UOlP8C;&voN%p42Zv3Xk*WF&S#e`|vqX=}az$Y)jdM>iham{ZQzGF7d# zw&HnqEhiFTjn`6V4c2qflIl>y%ottY_N@L$xc&%Je=J;bEUYbrhUZ%mEO<}Fn#P5i zA0B>Iu{B(=b?!7%(HOQg`rBD^z6*?HUmA11^yNa6n(rT+Ie4!v*iM`4!0}AHCfdTN zr4S>d1;q%ohMhmM`$(khC}Tb9H!N6-zeC->Idd~qG+z?7A9~stwx6NTUSdi*8S7=Z zZ>#v;sYe$cT%haQpKfC|oMddLXl2najV0mook9T~hff;|u%a*4OQ7?rXfRhY*!|Ll zT+4LdR3BZkeO}7s?)7V)8$i=Kp;W!%@x{W8p<5q*WA+h7vC?KDF~jX zYxXkvP5#5MUW&`_Uz)je@3Oz+GeCm#XYzx)=duyL4QEB<#5<8j2h-pPmpf?E8!q=Q zA^Ksz_$6Q?En@%Sm?1w}Q~zP@Z0%gn{H|xakA!z0VRj#5YC33R)q;J`{9E*aON_k} z?6#uW_Ry(Mvg@N8YC^l7)$R`0?q+KD(oJXRx6X!}&e9vsO?E8iRfP26yzTQlp18uh z&n#LB7wo&|+n$_^*xMd#etLfLC^RTo92%W_kKW!&x1Ogjbkpa1=pK?TaZVnJnyu5v zrjCL2DtJ=*ux+;OaXVeMJ!0O$-KdV3Yoivp9k$d?Ha|C`d7fzhLg|*dw)vBh(tR_V zpPcs}jan-M#h{$=9$nW&H=UqQUZlZx^in6CciDePIF70GxC7M@Ehtfx1WUula#ovM zslTI}&`tIR96=48yD_5N6xHT|PWLrQlJxL~WG(3-lKUWP^5teDk{*)8H<5*#7;7~? 
zfB|ClwBn&rMr0 z$S*bl*ZsOz4LBhD$GmD*%;bxLy(*RUfSxq0HP`tWt<5;ozTAw~awGm*GNU)GJ)_q- zmyKM@#&j9xvY7flAOWv;E{k2`pzXxV^}%sjMq2>Vlw@I>F|AdUO0L80V4#7bPvu%A zfjPLV(xk}ZddryV*WHCvU_yJ74_?wU}n5@OIk+JerW$*GV(qqR=ai20op+ z7G@qfI4uo60|}0lsq1U>TJ9^=A@5aY1XowEdQN?6v34u3=|qC_Bm}*ZA-8qYH(yVE z#kWCFm`vfUF;QpZoTxCugUZ~D%N!R z{%hOC3%FH0W=dZsP$YlrGri_roNzsPr8WDzT|C2{0DQ)m-NpZZ+F`H%7VJ8O-%Z|i zGVEjm9%T*Akz37(+eBVv1W)pIh=U;U$K1`|uDy#O+i!c3cRzM)#Clsj=;-O&Xv4Da z_$7FV>-PY5h2*@*KM?qU#g2(a`<&nc7Wr#+l66VI2%>vXdtY_~XNsNWgFHg$Fq4!H z@UjR2%^?Fs7-2{Fvs&)~OD9;3RB)~i9GOyRlkVZE&;ccvN8%fN!0?W9xn2avtPXI@ z%5#XBb3pd59IYV8;m!5u0J^fNazM^u*I5U!pk0*%jG(%MIs#2y7I$_728eJf>QCTz z(wHE;YzzFoMgVXXScRdN)o|6D*wqw2eOS;V0s8Sc2*wFgp@H=}NNLq-sg4E%@y zx`T!tqaG}}VOY@>-c7I6bG4|O9zd4ZUtr#00uTvnNw79--882RTX)WH`RUG&cS3hX zE)GOm2ASQ1Ve2668Vy@VK_Z=Z!mo`Q?S9#3M)S8%O<$P05ctNNl*!q~7`ML=Nlcp- zxOlo`^V|U@r;#!4dLe-ui3fb4Gfd8A#<=B$4B|mlZOVyfbuCCTIkk*&7y4 z02r$idk(;6$Xz1Zb1(o-(a;cYJ_cy{fOj?Ac}w6x1_PE}8Aji|8g_HSKUnkAb}&Ef z_hA2(>`+5?utdr(yrp(t_b?NQSD)7cu<$LGS+wWYic?YH1n5s-~)+!Lb!mAS$ zXuIVt#@^kkv3oZkhM`e*)q9(8*|$DN7B9%Tcxm6r$Y4Sl zT>#0dSb!D4L+W3nvX|-wK|Y>7+M;$IU}bRy%my<0gVs3A8{dcSH0l;H2xVBIdh8zmdx$-c@M8F?GbRV$FE-8T0N}HRFXIoxB+|P2CDM(uPfb>2m<{wnj9z z1u#K9J9U;WY?#~kr1=+ZKWls1{1=Croa6r1HSwX!Pqn4#4PQ9}WPpb@B%wo;QG6x3jP=U?1ntHOv+;W!vT)KkEIc_lNzo z^-Zv6Hdq4#VMFx~PJMV`_QKq)CvE?+g+6hTK7Hod>5Jjh7n#$Y^yRMb=`QBzJG7(w znWHc4=wloM^nt<1rlI87odK_od$s*i6A3IiR!*b^*2ZMAGYHeE8eY*deQxSp;N;zl z>zF{T!R^87P$g|`h-f$Sj;q#bP2HSqO${DnlctqCF|Fb@)O5vRCjSU6%}t(G#m}@G z!rBeN-Vk^nYj{1=D*F1S6}zyH{tm(X|ynyMh6%5b?NH6h7@j&rVMd#K;l(q320eRqh zpfA`@=e`k9f|0!LPTNG=2kqaHfKo);Ok1$|-r;+Ou(oQA#lNV@;@kkk;l+OtSA@~w zERZhmsmC<=oSWZ*Im3TbVf}y#C?GkYC2!d@4X2)Pk{3WnB%n)LhG`OP&o}op9bMT<(*6 z4|7ks-vk@0G~>2bpC+|8WiDV5%)aJeg%vPZ65$sgz-taRXDt6U^>+Fhj^$1i*y&br zeV6zayr6)6>B-uwiua)fczUK)H4J$A>t0nY|Ie(AKYlH1BTKM0+`rMl18kkeeB z8bbYd`4(bj}ARZipYa|uK;=WF|eU)?_BOW!dvmnI7CrXKM@V+Y04g`{WNr%_G zH||{<-f}$L?{?OKT)G;+0m4pDNGJzi$kjA>yaHh{78EZl_Y66PyGcirmwg=tR)|OM 
zdQM2yxfFkeBw2`L9T(AApx=#Ni6Qo8tuD$dg|yY)?6_A% z=y)Wt#t(aCb?BZwCnkm0RlAV+hJ(b03HWV3*r7M}f`=VqU@;>rm;EnLQ}(4EX!hVY z7qP-wEQI|K#qB}?men3WR~Jx#=Y~F}MB|PS4ayI(G2#jzJ!tf|vnXf)^(#pCLp0(3Mv5lf zf=cJ_3rHAM;gX1u6$A-*#qD(EROQ`jzv5RCi1|CU$WR&VW%PC9ZGaXS%7Z%?eJy)k z5o}}h8^_zekc+g13t2Ux9Zc31zXBzz3XU@R`W25Tk<8-C#FeOW?{nq0sM7FKAvMS$ zk%hkjvG&9bFx}v#Wyo^|e!Iy@#&l|hfKiwCOjjJ%72{VW>1zDq=LVBdj>Y>_Uu5kB z0`J}({9a2)3ok7R=WYWGCbwvMcxpJ<63Y6}GHaPDeO$nxc%diq6n+HvKWcr@`tT5K z-4xN*30$^;cnsd%6cFE+&&cm7gZZ?v7BHK*hZ zMdrk%$kt9_rT0HN^x#nF+1K(j`lg(p7A! zHc2$OF})0leRUOGbezdLF@7XTgru`P)0KpEB|$~V#OP|%clOuOtDHRUOZRFW>zi-B z^oIrWr{}xrhGx{stzqlor*DBzclxahVe5s6_TsDccScdLP-X}7a$stJF5SW8?i@d! z+~0-Ibmd_k>sv=xpQ*ns(^sah-0cIw_t5K|yqUV0IdL(v^->gOO!ZWCpy6(gaye7Z+=R<4LCyTE1+4D7n_K@`&h%8Xtdhuu%XVa{&Tqh8$^&K%ZB?y}j5 zR(oMg(bYwc^uUvEqMcddz6riw0WZTAk$(VD)IAhnybd1;_%vuoaXly)M8Uf#m_Y$@ zyu=jXEf+Yz-5MM~eg!H9cM5zC>2>z>H$mM1+Ei%5y5@l^4 z*S`P{rN;>LT|wwybP#MPCEj?Uml7ky3wT z0*B_j5#9dr*5^qFlLZbYrRt7iLNO`7Z9oT;<(E>G|5%_pV5QX+pGr3@=|ocNuas~D a9b}eew-y@uvyEB%vnBs7TeiPY@qYnf-JUc6 literal 0 HcmV?d00001 diff --git a/scripts/transform_tests.py b/scripts/transform_tests.py new file mode 100644 index 0000000..a63d4e3 --- /dev/null +++ b/scripts/transform_tests.py @@ -0,0 +1,717 @@ +import re +import sys + +def transform_file(filepath, is_pattern_query=False): + with open(filepath, 'r') as f: + content = f.read() + + original = content + + # Step 1: Make beforeEach async if it contains async DB calls + # Match beforeEach(() => { ... 
}) blocks + content = make_hooks_async(content, 'beforeEach', is_pattern_query) + content = make_hooks_async(content, 'afterEach', is_pattern_query) + + # Step 2: Make it() callbacks async if they contain async DB calls + content = make_it_async(content, is_pattern_query) + + # Step 3: Add await to async DB method calls + content = add_awaits(content, is_pattern_query) + + # Step 4: Transform expect(() => asyncMethod()).toThrow() patterns + content = transform_sync_throws(content) + + return content + +def has_async_calls(code, is_pattern_query=False): + """Check if code block contains async DB calls that need await.""" + # Check for DB methods that are now async + async_patterns = [ + r'\bdb\.createNode\s*\(', + r'\bdb\.getNode\s*\(', + r'\bdb\.updateNode\s*\(', + r'\bdb\.deleteNode\s*\(', + r'\bdb\.createEdge\s*\(', + r'\bdb\.getEdge\s*\(', + r'\bdb\.deleteEdge\s*\(', + r'\bdb\.transaction\s*\(', + r'\bdb\.export\s*\(', + r'\bdb\.import\s*\(', + r'\bdb\.close\s*\(', + r'\bdb\.mergeNode\s*\(', + r'\bdb\.mergeEdge\s*\(', + r'\bdb\.createPropertyIndex\s*\(', + r'\bdb\.listIndexes\s*\(', + r'\bdb\.dropIndex\s*\(', + ] + + # NodeQuery execution methods (not PatternQuery) + nodequery_patterns = [ + r'\.exec\s*\(\s*\)', + r'\.first\s*\(\s*\)', + r'\.count\s*\(\s*\)', + r'\.exists\s*\(\s*\)', + ] + + traversal_patterns = [ + r'\.toArray\s*\(\s*\)', + r'\.toPaths\s*\(\s*\)', + r'\.shortestPath\s*\(', + r'\.paths\s*\(', + r'\.allPaths\s*\(', + ] + + for p in async_patterns: + if re.search(p, code): + return True + + if not is_pattern_query: + for p in nodequery_patterns: + if re.search(p, code): + return True + else: + # For PatternQuery, only NodeQuery exec() needs await but they call db.nodes() not db.pattern() + # Actually in PatternQuery.test.ts, .exec() etc are PatternQuery methods (SYNC) + # So for PatternQuery file, we don't add await to .exec(), .first(), .count() + pass + + for p in traversal_patterns: + if re.search(p, code): + return True + + return False + +def 
make_hooks_async(content, hook_name, is_pattern_query=False): + """Make beforeEach/afterEach async if they contain async calls.""" + # Pattern: beforeEach(() => { or beforeEach(function() { + # We need to find the full block + result = [] + i = 0 + pattern = re.compile(r'\b' + hook_name + r'\s*\(\s*(?:\(\s*\)|function\s*\(\s*\))\s*=>\s*\{') + + while i < len(content): + m = pattern.search(content, i) + if not m: + result.append(content[i:]) + break + + result.append(content[i:m.start()]) + + # Find the matching closing brace + block_start = m.end() - 1 # position of opening { + block_end = find_matching_brace(content, block_start) + + if block_end == -1: + result.append(content[m.start():]) + break + + block_body = content[block_start+1:block_end] + + if has_async_calls(block_body, is_pattern_query): + # Make it async + original_match = m.group(0) + new_match = original_match.replace('() =>', 'async () =>') + result.append(new_match) + else: + result.append(m.group(0)) + + result.append(content[block_start+1:block_end]) + result.append(content[block_end]) + i = block_end + 1 + + return ''.join(result) + +def make_it_async(content, is_pattern_query=False): + """Make it() test callbacks async if they contain async calls.""" + result = [] + i = 0 + # Match: it('...', () => { or it("...", () => { + pattern = re.compile(r'\b(it|test)\s*\(\s*([\'"`].*?[\'"`]|`[^`]*`)\s*,\s*\(\s*\)\s*=>\s*\{', re.DOTALL) + + while i < len(content): + m = pattern.search(content, i) + if not m: + result.append(content[i:]) + break + + result.append(content[i:m.start()]) + + # Find the matching closing brace for the callback + block_start = m.end() - 1 # position of opening { + block_end = find_matching_brace(content, block_start) + + if block_end == -1: + result.append(content[m.start():]) + break + + block_body = content[block_start+1:block_end] + + if has_async_calls(block_body, is_pattern_query): + # Make it async - replace () => { with async () => { + new_match = m.group(0)[:-1] # 
remove the { + new_match = new_match.rstrip() + # Find the () => part and make it async () => + new_match = re.sub(r'\(\s*\)\s*=>\s*$', 'async () => ', new_match) + result.append(new_match + '{') + else: + result.append(m.group(0)) + + result.append(content[block_start+1:block_end]) + result.append(content[block_end]) + i = block_end + 1 + + return ''.join(result) + +def find_matching_brace(content, start): + """Find the matching closing brace for content[start] which should be '{'.""" + assert content[start] == '{', f"Expected '{{' at position {start}, got '{content[start]}'" + depth = 0 + i = start + in_string = None + escape_next = False + + while i < len(content): + ch = content[i] + + if escape_next: + escape_next = False + i += 1 + continue + + if ch == '\\' and in_string: + escape_next = True + i += 1 + continue + + if in_string: + if ch == in_string and (in_string != '`' or True): + if in_string == '`': + in_string = None + elif in_string in ('"', "'"): + in_string = None + i += 1 + continue + + if ch in ('"', "'", '`'): + in_string = ch + i += 1 + continue + + # Handle template literal ${} expressions - just skip for brace counting + if ch == '{': + depth += 1 + elif ch == '}': + depth -= 1 + if depth == 0: + return i + + i += 1 + + return -1 + +def add_awaits(content, is_pattern_query=False): + """Add await to async DB method calls.""" + + # Methods that need await (not already awaited) + # We need to be careful not to double-add await + + # DB methods that are now async + async_db_methods = [ + 'createNode', 'getNode', 'updateNode', 'deleteNode', + 'createEdge', 'getEdge', 'deleteEdge', + 'export', 'import', 'close', + 'mergeNode', 'mergeEdge', + 'createPropertyIndex', 'listIndexes', 'dropIndex', + ] + + # Add await to db.method() calls that don't already have await + for method in async_db_methods: + # Match db.method( not preceded by await + # Also handle testDb.close(), etc. + pattern = re.compile(r'(? 
{ => await db.transaction(async (ctx) => { + # db.transaction(() => { => await db.transaction(async () => { + + # Handle: expect(() => db.transaction(...)).toThrow() - these get transformed separately + # Here we handle: db.transaction( or const x = db.transaction( + + result = [] + i = 0 + pattern = re.compile(r'\bdb\.transaction\s*\(') + + while i < len(content): + m = pattern.search(content, i) + if not m: + result.append(content[i:]) + break + + # Check if await already precedes + prefix = content[max(0, m.start()-10):m.start()] + if re.search(r'\bawait\s*$', prefix): + result.append(content[i:m.end()]) + i = m.end() + continue + + # Check if this is inside expect(() => ...) - don't add await here + # Look back further for expect( + # Actually the expect transform happens in transform_sync_throws + # We need to skip if this db.transaction is inside an arrow function argument to expect + prefix_long = content[max(0, m.start()-50):m.start()] + if re.search(r'expect\s*\(\s*\(\s*\)\s*=>\s*$', prefix_long): + result.append(content[i:m.end()]) + i = m.end() + continue + + result.append(content[i:m.start()]) + result.append('await ' + m.group(0)) + i = m.end() + + return ''.join(result) + +def add_await_to_nodequery_exec(content): + """Add await to .exec(), .first(), .count(), .exists() on NodeQuery chains.""" + # These come after db.nodes(...) chains + # Pattern: something).exec() or something).first() etc. + # We add await if not already there + + methods = ['exec', 'first', 'count', 'exists'] + + for method in methods: + result = [] + i = 0 + pattern = re.compile(r'\)\s*\.\s*' + method + r'\s*\(\s*\)') + + while i < len(content): + m = pattern.search(content, i) + if not m: + result.append(content[i:]) + break + + # Check if await precedes the chain + # We need to look back to find the start of the chain + prefix = content[max(0, m.start()-200):m.start()] + + # Check if this is a PatternQuery call (db.pattern()...) 
+ # If the chain contains db.pattern(), skip + # Find the chain start - look back for db.nodes( or db.pattern( + chain_context = content[max(0, m.start()-300):m.end()] + + # Check if it's already awaited by looking at prefix more carefully + # The await would be somewhere before the start of the chain + # This is complex - let's look for 'await' keyword before the nearest assignment or statement start + + # Check if preceded by await (looking at immediate area) + before_match = content[max(0, m.start()-5):m.start()] + + # Let's check the full statement for await + # Find statement start + stmt_start = find_statement_start(content, m.start()) + stmt_prefix = content[stmt_start:m.start()] + + if 'await' in stmt_prefix and not re.search(r'\bawait\b.*\bawait\b', stmt_prefix): + # Already has await in this statement + result.append(content[i:m.end()]) + i = m.end() + continue + + result.append(content[i:m.start()]) + result.append(m.group(0)) # will add await before chain start + i = m.end() + + content = ''.join(result) + + # Better approach: find db.nodes(...) 
chains and add await before them + content = add_await_before_nodequery_chain(content) + + return content + +def add_await_to_nodequery_exec_pattern_file(content): + """For PatternQuery test file - only add await to db.nodes() chains, not db.pattern() chains.""" + # Same as above but skip db.pattern() chains + content = add_await_before_nodequery_chain(content, skip_pattern=True) + return content + +def add_await_before_nodequery_chain(content, skip_pattern=False): + """Add await before db.nodes(...).....exec() chains.""" + result = [] + i = 0 + + # Find db.nodes( chains that end with .exec(), .first(), .count(), .exists() + # We need to find the full chain + db_nodes_pattern = re.compile(r'\bdb\.nodes\s*\(') + + while i < len(content): + m = db_nodes_pattern.search(content, i) + if not m: + result.append(content[i:]) + break + + # Check if await already precedes + prefix = content[max(0, m.start()-10):m.start()] + if re.search(r'\bawait\s*$', prefix): + result.append(content[i:m.end()]) + i = m.end() + continue + + # Now find if this chain ends with an exec method + # Find the full chain by scanning forward + # First find the closing paren of db.nodes(...) 
+ paren_end = find_matching_paren(content, m.end() - 1) + if paren_end == -1: + result.append(content[i:m.end()]) + i = m.end() + continue + + # Now scan forward through chained method calls + j = paren_end + 1 + chain_end = paren_end + last_exec_end = -1 + + while j < len(content): + # Skip whitespace and newlines + ws = re.match(r'[\s]*', content[j:]) + j += len(ws.group(0)) if ws else 0 + + if j >= len(content) or content[j] != '.': + break + + # Check for method call + method_match = re.match(r'\.\s*(\w+)\s*\(', content[j:]) + if not method_match: + break + + method_name = method_match.group(1) + call_start = j + method_match.start(0) + paren_start = j + method_match.end(0) - 1 + paren_close = find_matching_paren(content, paren_start) + + if paren_close == -1: + break + + chain_end = paren_close + + if method_name in ('exec', 'first', 'count', 'exists'): + last_exec_end = paren_close + + j = paren_close + 1 + + if last_exec_end != -1: + # This chain ends with an exec method - add await before db.nodes( + result.append(content[i:m.start()]) + result.append('await ' + content[m.start():last_exec_end+1]) + i = last_exec_end + 1 + else: + result.append(content[i:m.end()]) + i = m.end() + + return content # Return original - this approach is too complex, use simpler regex + +def find_statement_start(content, pos): + """Find the start of the statement containing pos.""" + # Look back for newline, semicolon, or opening brace + i = pos - 1 + while i >= 0: + ch = content[i] + if ch in (';\n', '\n', '{'): + return i + 1 + if ch == ';': + return i + 1 + i -= 1 + return 0 + +def find_matching_paren(content, start): + """Find matching ) for content[start] which should be '('.""" + if start >= len(content) or content[start] != '(': + return -1 + depth = 0 + i = start + in_string = None + escape_next = False + + while i < len(content): + ch = content[i] + + if escape_next: + escape_next = False + i += 1 + continue + + if ch == '\\' and in_string: + escape_next = True + i += 1 
+ continue + + if in_string: + if ch == in_string: + in_string = None + i += 1 + continue + + if ch in ('"', "'", '`'): + in_string = ch + i += 1 + continue + + if ch == '(': + depth += 1 + elif ch == ')': + depth -= 1 + if depth == 0: + return i + + i += 1 + + return -1 + +def add_await_to_traversal_exec(content): + """Add await to traversal execution methods.""" + methods = ['toArray', 'toPaths', 'shortestPath', 'paths', 'allPaths'] + + for method in methods: + result = [] + i = 0 + pattern = re.compile(r'\)\s*\.\s*' + method + r'\s*\(') + + while i < len(content): + m = pattern.search(content, i) + if not m: + result.append(content[i:]) + break + + # Check if stmt already has await + stmt_start = find_statement_start(content, m.start()) + stmt_prefix = content[stmt_start:m.start()] + + if re.search(r'\bawait\b', stmt_prefix): + result.append(content[i:m.end()]) + i = m.end() + continue + + result.append(content[i:m.end()]) + i = m.end() + + content = ''.join(result) + + return content + +def transform_sync_throws(content): + """Transform expect(() => asyncMethod()).toThrow() to await expect(asyncMethod()).rejects.toThrow().""" + # Pattern: expect(() => db.method(...)).toThrow(...) + # becomes: await expect(db.method(...)).rejects.toThrow(...) + + # Also handle: expect(() => db.transaction(...)).toThrow(...) 
+ + result = [] + i = 0 + pattern = re.compile(r'expect\s*\(\s*\(\s*\)\s*=>\s*(db\.\w+\s*\()') + + while i < len(content): + m = pattern.search(content, i) + if not m: + result.append(content[i:]) + break + + # Get the method name + method_call_start = m.start(1) + method_name_match = re.match(r'db\.(\w+)', m.group(1)) + if not method_name_match: + result.append(content[i:m.end()]) + i = m.end() + continue + + method_name = method_name_match.group(1) + + # Check if this is an async method (not traverse which is sync) + sync_methods = {'traverse', 'nodes', 'pattern'} + if method_name in sync_methods: + result.append(content[i:m.end()]) + i = m.end() + continue + + # Find the full inner expression: db.method(...) + paren_start = m.start(1) + len(m.group(1)) - 1 + paren_end = find_matching_paren(content, paren_start) + if paren_end == -1: + result.append(content[i:m.end()]) + i = m.end() + continue + + inner_expr = content[m.start(1):paren_end+1] + + # Now find the closing ) of expect(...) + # The expect( opens, then () => db.method(...) is the arg + # expect_paren_start is at expect( + expect_paren_pos = m.start() + m.group(0).index('(') + # Find matching ) for this expect( + # But first we need to handle: the content inside expect is (() => db.method(...)) + # The closing ) of expect is right after paren_end + after_inner = content[paren_end+1:] + close_match = re.match(r'\s*\)', after_inner) + if not close_match: + result.append(content[i:m.end()]) + i = m.end() + continue + + after_expect_close = paren_end + 1 + close_match.end() + + # Now look for .toThrow(...) or .not.toThrow() etc. + rest = content[after_expect_close:] + throws_match = re.match(r'\s*\.\s*toThrow\s*\(', rest) + if not throws_match: + result.append(content[i:m.end()]) + i = m.end() + continue + + # Find the closing ) of toThrow(...) 
+ throws_paren_start = after_expect_close + throws_match.end() - 1 + throws_paren_end = find_matching_paren(content, throws_paren_start) + if throws_paren_end == -1: + result.append(content[i:m.end()]) + i = m.end() + continue + + throw_arg = content[throws_paren_start+1:throws_paren_end] + + # Check if 'await' already precedes expect + prefix = content[max(0, m.start()-10):m.start()] + has_await = re.search(r'\bawait\s*$', prefix) + + await_prefix = '' if has_await else 'await ' + + new_code = f'{await_prefix}expect({inner_expr}).rejects.toThrow({throw_arg})' + + result.append(content[i:m.start()]) + result.append(new_code) + i = throws_paren_end + 1 + + return ''.join(result) + + +# Main transformation - use a simpler line-by-line approach with context +def transform_content(content, is_pattern_query=False, is_transaction=False, is_traversal=False): + """Main transformation function using regex substitution.""" + + lines = content.split('\n') + result_lines = [] + + # Track if we're inside async contexts + # Process line by line is too simple for our needs + # Let's use the full content approach + + return content + + +# Simpler approach using targeted regex replacements + +ASYNC_DB_METHODS = [ + 'createNode', 'getNode', 'updateNode', 'deleteNode', + 'createEdge', 'getEdge', 'deleteEdge', + 'export', 'import', 'close', + 'mergeNode', 'mergeEdge', + 'createPropertyIndex', 'listIndexes', 'dropIndex', +] + +NODEQUERY_EXEC_METHODS = ['exec', 'first', 'count', 'exists'] +TRAVERSAL_EXEC_METHODS = ['toArray', 'toPaths', 'shortestPath', 'paths', 'allPaths'] + + +def simple_transform(content, is_pattern_query=False): + """Apply simple regex transformations.""" + + # 1. Transform expect(() => db.asyncMethod()).toThrow() patterns FIRST + # because this changes what we should await + + # Pattern: expect(() => db.METHOD(...)).toThrow(...) 
+ # Async methods that need this transformation: + async_methods_for_throw = '|'.join(ASYNC_DB_METHODS) + + # Handle: expect(() => db.transaction(async? (...) => {...})).toThrow(...) + # This is complex - let's handle transaction separately + + # Simple pattern for: expect(() => db.METHOD(...)).toThrow(MSG) + # where METHOD is in ASYNC_DB_METHODS (not traverse/nodes/pattern) + def replace_sync_throw(m): + prefix_check = content[max(0, m.start()-10):m.start()] + full_match = m.group(0) + method = m.group(1) + inner_args = m.group(2) + throw_msg = m.group(3) + + await_prefix = 'await ' if not re.search(r'\bawait\s*$', prefix_check) else '' + return f"{await_prefix}expect(db.{method}({inner_args})).rejects.toThrow({throw_msg})" + + # This is getting complex. Let me use a different approach. + # Let's write the transformation as a series of simple find-and-replace patterns + # that we can verify work for specific cases. + + return content + + +if __name__ == '__main__': + filepath = sys.argv[1] + is_pattern = 'PatternQuery' in filepath + result = transform_file(filepath, is_pattern_query=is_pattern) + print(result, end='') + diff --git a/src/core/Database.ts b/src/core/Database.ts index c64de3d..6e66548 100644 --- a/src/core/Database.ts +++ b/src/core/Database.ts @@ -108,6 +108,42 @@ export class GraphDatabase { this.prepareStatements(); } + /** + * Async factory method for creating a GraphDatabase instance. + * Preferred over the constructor for async-first code. + * + * @param path - Path to SQLite database file. Use ':memory:' for in-memory database. + * @param options - Database configuration options + * @returns A Promise resolving to a new GraphDatabase instance + * + * @example + * ```typescript + * const db = await GraphDatabase.create('./graph.db'); + * ``` + */ + static async create(path: string, options?: DatabaseOptions): Promise { + return new GraphDatabase(path, options); + } + + /** + * Get a node by ID synchronously (internal helper). 
+ * @private + */ + private _getNodeSync(id: number): Node | null { + const stmt = this.preparedStatements.get('getNode')!; + const row = stmt.get(id) as any; + + if (!row) return null; + + return { + id: row.id, + type: row.type, + properties: deserialize(row.properties), + createdAt: timestampToDate(row.created_at), + updatedAt: timestampToDate(row.updated_at) + }; + } + /** * Prepare frequently used SQL statements for better performance. * @private @@ -172,7 +208,7 @@ export class GraphDatabase { * console.log(job.createdAt); // 2025-10-27T... * ``` */ - createNode(type: string, properties: T): Node { + async createNode(type: string, properties: T): Promise> { validateNodeType(type, this.schema); validateNodeProperties(type, properties, this.schema); @@ -204,7 +240,7 @@ export class GraphDatabase { * } * ``` */ - getNode(id: number): Node | null { + async getNode(id: number): Promise { validateNodeId(id); const stmt = this.preparedStatements.get('getNode')!; @@ -239,10 +275,10 @@ export class GraphDatabase { * }); * ``` */ - updateNode(id: number, properties: Partial): Node { + async updateNode(id: number, properties: Partial): Promise { validateNodeId(id); - const existing = this.getNode(id); + const existing = this._getNodeSync(id); if (!existing) { throw new Error(`Node with ID ${id} not found`); } @@ -274,7 +310,7 @@ export class GraphDatabase { * console.log(deleted ? 
'Deleted' : 'Not found'); * ``` */ - deleteNode(id: number): boolean { + async deleteNode(id: number): Promise { validateNodeId(id); const stmt = this.preparedStatements.get('deleteNode')!; @@ -305,19 +341,19 @@ export class GraphDatabase { * }); * ``` */ - createEdge( + async createEdge( from: number, type: string, to: number, properties?: T - ): Edge { + ): Promise> { validateEdgeType(type, this.schema); validateNodeId(from); validateNodeId(to); // Verify nodes exist - const fromNode = this.getNode(from); - const toNode = this.getNode(to); + const fromNode = this._getNodeSync(from); + const toNode = this._getNodeSync(to); if (!fromNode) { throw new Error(`Source node with ID ${from} not found`); @@ -358,7 +394,7 @@ export class GraphDatabase { * } * ``` */ - getEdge(id: number): Edge | null { + async getEdge(id: number): Promise { validateNodeId(id); const stmt = this.preparedStatements.get('getEdge')!; @@ -387,7 +423,7 @@ export class GraphDatabase { * const deleted = db.deleteEdge(1); * ``` */ - deleteEdge(id: number): boolean { + async deleteEdge(id: number): Promise { validateNodeId(id); const stmt = this.preparedStatements.get('deleteEdge')!; @@ -439,7 +475,7 @@ export class GraphDatabase { traverse(startNodeId: number): TraversalQuery { validateNodeId(startNodeId); - const node = this.getNode(startNodeId); + const node = this.db.prepare('SELECT id FROM nodes WHERE id = ?').get(startNodeId); if (!node) { throw new Error(`Start node with ID ${startNodeId} not found`); } @@ -468,7 +504,7 @@ export class GraphDatabase { * .exec(); * ``` */ - pattern = Record>(): PatternQuery { + pattern = Record>(): PatternQuery { return new PatternQuery(this.db); } @@ -505,14 +541,14 @@ export class GraphDatabase { * }); * ``` */ - transaction(fn: (ctx: TransactionContext) => T): T { + async transaction(fn: (ctx: TransactionContext) => T | Promise): Promise { // Start transaction this.db.prepare('BEGIN').run(); const ctx = new TransactionContext(this.db); try { - const result 
= fn(ctx); + const result = await fn(ctx); // Auto-commit if not manually finalized if (!ctx.isFinalized()) { @@ -540,7 +576,7 @@ export class GraphDatabase { * fs.writeFileSync('graph-backup.json', JSON.stringify(data, null, 2)); * ``` */ - export(): GraphExport { + async export(): Promise { const nodesStmt = this.db.prepare('SELECT * FROM nodes ORDER BY id'); const edgesStmt = this.db.prepare('SELECT * FROM edges ORDER BY id'); @@ -585,14 +621,14 @@ export class GraphDatabase { * db.import(data); * ``` */ - import(data: GraphExport): void { - this.transaction(() => { + async import(data: GraphExport): Promise { + await this.transaction(async () => { for (const node of data.nodes) { - this.createNode(node.type, node.properties); + await this.createNode(node.type, node.properties); } for (const edge of data.edges) { - this.createEdge(edge.from, edge.type, edge.to, edge.properties); + await this.createEdge(edge.from, edge.type, edge.to, edge.properties); } }); } @@ -606,7 +642,7 @@ export class GraphDatabase { * db.close(); * ``` */ - close(): void { + async close(): Promise { this.db.close(); } @@ -654,12 +690,12 @@ export class GraphDatabase { * ); * ``` */ - mergeNode( + async mergeNode( type: string, matchProperties: Partial, baseProperties?: T, options?: MergeOptions - ): MergeResult { + ): Promise> { validateNodeType(type, this.schema); // Build WHERE clause for all match properties @@ -677,8 +713,7 @@ export class GraphDatabase { } } - return this.transaction(() => { - // Build SQL to find matching node + return await this.transaction(() => { const whereConditions = matchKeys.map( (key) => `json_extract(properties, '$.${key}') = ?` ); @@ -781,20 +816,20 @@ export class GraphDatabase { * ); * ``` */ - mergeEdge( + async mergeEdge( from: number, type: string, to: number, properties?: T, options?: EdgeMergeOptions - ): EdgeMergeResult { + ): Promise> { validateEdgeType(type, this.schema); validateNodeId(from); validateNodeId(to); // Verify nodes exist - const 
fromNode = this.getNode(from); - const toNode = this.getNode(to); + const fromNode = this._getNodeSync(from); + const toNode = this._getNodeSync(to); if (!fromNode) { throw new Error(`Source node with ID ${from} not found`); @@ -803,7 +838,7 @@ export class GraphDatabase { throw new Error(`Target node with ID ${to} not found`); } - return this.transaction(() => { + return await this.transaction(() => { // Find existing edges const stmt = this.db.prepare(` SELECT * FROM edges @@ -923,7 +958,7 @@ export class GraphDatabase { * db.mergeNode('Job', { url: 'https://...' }, ...); * ``` */ - createPropertyIndex(nodeType: string, property: string, unique = false): void { + async createPropertyIndex(nodeType: string, property: string, unique = false): Promise { const indexName = `idx_merge_${nodeType}_${property}`; const uniqueClause = unique ? 'UNIQUE' : ''; @@ -969,7 +1004,7 @@ export class GraphDatabase { * }); * ``` */ - listIndexes(): IndexInfo[] { + async listIndexes(): Promise { const stmt = this.db.prepare(` SELECT name, tbl_name as 'table', sql FROM sqlite_master @@ -1002,7 +1037,7 @@ export class GraphDatabase { * db.dropIndex('idx_merge_Job_url'); * ``` */ - dropIndex(indexName: string): void { + async dropIndex(indexName: string): Promise { this.db.prepare(`DROP INDEX IF EXISTS ${indexName}`).run(); } } \ No newline at end of file diff --git a/src/index.ts b/src/index.ts index 0757c0c..121f6c7 100644 --- a/src/index.ts +++ b/src/index.ts @@ -8,13 +8,13 @@ * ```typescript * import { GraphDatabase } from 'sqlite-graph'; * - * const db = new GraphDatabase('./graph.db'); + * const db = await GraphDatabase.create('./graph.db'); * - * const job = db.createNode('Job', { title: 'Engineer', status: 'active' }); - * const company = db.createNode('Company', { name: 'TechCorp' }); - * db.createEdge(job.id, 'POSTED_BY', company.id); + * const job = await db.createNode('Job', { title: 'Engineer', status: 'active' }); + * const company = await db.createNode('Company', { name: 
'TechCorp' }); + * await db.createEdge(job.id, 'POSTED_BY', company.id); * - * const activeJobs = db.nodes('Job') + * const activeJobs = await db.nodes('Job') * .where({ status: 'active' }) * .exec(); * ``` diff --git a/src/query/NodeQuery.ts b/src/query/NodeQuery.ts index 79ec97b..f63075e 100644 --- a/src/query/NodeQuery.ts +++ b/src/query/NodeQuery.ts @@ -238,7 +238,7 @@ export class NodeQuery { * console.log(`Found ${results.length} active jobs`); * ``` */ - exec(): Node[] { + async exec(): Promise { const sql = this.buildSQL(); const params = this.buildParams(); @@ -277,10 +277,10 @@ export class NodeQuery { * } * ``` */ - first(): Node | null { + async first(): Promise { const original = this.limitValue; this.limitValue = 1; - const results = this.exec(); + const results = await this.exec(); this.limitValue = original; return results.length > 0 ? results[0] : null; } @@ -299,7 +299,7 @@ export class NodeQuery { * console.log(`${count} active jobs`); * ``` */ - count(): number { + async count(): Promise { const sql = this.buildSQL(true); const params = this.buildParams(); @@ -324,8 +324,8 @@ export class NodeQuery { * } * ``` */ - exists(): boolean { - return this.count() > 0; + async exists(): Promise { + return (await this.count()) > 0; } /** diff --git a/src/query/TraversalQuery.ts b/src/query/TraversalQuery.ts index 43093cf..cfd2cd6 100644 --- a/src/query/TraversalQuery.ts +++ b/src/query/TraversalQuery.ts @@ -216,7 +216,7 @@ export class TraversalQuery { * console.log(`Found ${nodes.length} similar jobs`); * ``` */ - toArray(): Node[] { + async toArray(): Promise { const visited = new Set(); const results: Node[] = []; const queue: Array<{ nodeId: number; depth: number }> = [ @@ -286,7 +286,7 @@ export class TraversalQuery { * }); * ``` */ - toPaths(): Node[][] { + async toPaths(): Promise { const paths: Node[][] = []; const queue: Array<{ nodeId: number; path: Node[]; depth: number; visited: Set }> = [ { nodeId: this.startNodeId, path: [], depth: 0, 
visited: new Set() } @@ -353,7 +353,7 @@ export class TraversalQuery { * } * ``` */ - shortestPath(targetNodeId: number): Node[] | null { + async shortestPath(targetNodeId: number): Promise { const visited = new Set(); const parent = new Map(); const queue: number[] = [this.startNodeId]; @@ -407,10 +407,10 @@ export class TraversalQuery { * const shortPaths = db.traverse(job1Id).paths(job2Id, { maxDepth: 3 }); * ``` */ - paths(targetNodeId: number, options?: { + async paths(targetNodeId: number, options?: { maxPaths?: number; maxDepth?: number; - }): Node[][] { + }): Promise { // Apply maxDepth if provided in options if (options?.maxDepth !== undefined) { this.maxDepth(options.maxDepth); @@ -418,11 +418,11 @@ export class TraversalQuery { // Use allPaths if maxPaths is specified, otherwise use toPaths logic if (options?.maxPaths !== undefined) { - return this.allPaths(targetNodeId, options.maxPaths); + return await this.allPaths(targetNodeId, options.maxPaths); } // Filter toPaths() results to only include paths ending at target - const allPaths = this.toPaths(); + const allPaths = await this.toPaths(); return allPaths.filter(path => path.length > 0 && path[path.length - 1].id === targetNodeId ); @@ -443,7 +443,7 @@ export class TraversalQuery { * console.log(`Found ${paths.length} paths`); * ``` */ - allPaths(targetNodeId: number, maxPaths: number = 10): Node[][] { + async allPaths(targetNodeId: number, maxPaths: number = 10): Promise { const paths: Node[][] = []; const visited = new Set(); diff --git a/tests/integration/graph-operations.test.ts b/tests/integration/graph-operations.test.ts index 3b9addf..b2093c5 100644 --- a/tests/integration/graph-operations.test.ts +++ b/tests/integration/graph-operations.test.ts @@ -133,7 +133,7 @@ describe('Complex Graph Operations - Integration Tests', async () => { founded: 2000 + i, revenue: 1000000 * (i + 1) }) - ); + )); const jobs = await Promise.all(Array.from({ length: 30 }, async (_, i) => await db.createNode('Job', { 
title: `Job ${i}`, @@ -141,7 +141,7 @@ describe('Complex Graph Operations - Integration Tests', async () => { remote: i % 2 === 0, department: ['Engineering', 'Sales', 'Marketing'][i % 3] }) - ); + )); // Link jobs to companies await Promise.all(jobs.map(async (job, i) => { diff --git a/tests/integration/job-pipeline.test.ts b/tests/integration/job-pipeline.test.ts index 7dbee1e..6715dff 100644 --- a/tests/integration/job-pipeline.test.ts +++ b/tests/integration/job-pipeline.test.ts @@ -267,7 +267,7 @@ describe('Job Application Pipeline - Integration Tests', async () => { it('should calculate skill match percentage for jobs', async () => { // My skills const mySkills = ['TypeScript', 'React', 'Node.js', 'PostgreSQL']; - const skillNodes = await Promise.all(mySkills.mapmap(async name => await db.createNode('Skill', { name, owned: true }) + const skillNodes = await Promise.all(mySkills.map(async name => await db.createNode('Skill', { name, owned: true }) )); // Job requirements @@ -310,7 +310,7 @@ describe('Job Application Pipeline - Integration Tests', async () => { 'Senior Backend Engineer' ]; - const jobs = await Promise.all(jobTitles.mapmap(async title => await db.createNode('Job', { + const jobs = await Promise.all(jobTitles.map(async title => await db.createNode('Job', { title, status: 'active', postedAt: new Date().toISOString() @@ -572,12 +572,12 @@ describe('Job Application Pipeline - Integration Tests', async () => { name: `Company ${i}`, size: i % 3 === 0 ? 'large' : i % 3 === 1 ? 
'medium' : 'small' }) - ); + )); // Create skills - const skills = ['TypeScript', 'React', 'Node.js', 'Python', 'Go', 'Rust', 'Java', 'C++'].map( + const skills = await Promise.all(['TypeScript', 'React', 'Node.js', 'Python', 'Go', 'Rust', 'Java', 'C++'].map( async name => await db.createNode('Skill', { name }) - ); + )); // Create 100 jobs const jobs = await Promise.all(Array.from({ length: 100 }, async (_, i) => await db.createNode('Job', { @@ -586,7 +586,7 @@ describe('Job Application Pipeline - Integration Tests', async () => { salary: { min: 100000 + i * 1000, max: 150000 + i * 1000 }, remote: i % 2 === 0 }) - ); + )); // Create relationships await Promise.all(jobs.map(async (job, i) => {