diff --git a/docs/schema-language.md b/docs/schema-language.md index b5bc61cd..db5da4c1 100644 --- a/docs/schema-language.md +++ b/docs/schema-language.md @@ -318,3 +318,99 @@ Retrieving all edges is now as easy as this: ```cpp edges.slice(nodes[i].edges_range) ``` + +## Imports + +Schemas can be split across multiple files using import statements. +An import pulls in all definitions (structs, enums, constants, archives) from +another file, making them available for use in the importing file. + +```cpp +import "path/to/types.flatdata"; +``` + +Import statements must appear at the top of the file, before any namespace or +type definitions. + +### Path Resolution + +Import paths are resolved **relative to the file** containing the import +statement: + +```cpp +import "types.flatdata"; // same directory +import "sub/geo_types.flatdata"; // subdirectory +import "../shared/common.flatdata"; // parent directory +``` + +### Diamond and Cyclic Imports + +Diamond imports (the same file imported via multiple paths) are deduplicated +automatically. Cyclic imports are also supported — a parent archive schema can +import a child schema that imports the parent back. + +### Generated Code + +For **C++** and **Rust**, the generator uses separate compilation: only types +from the root file are emitted, with include/import directives referencing +the separately generated imported files. Each `.flatdata` file must be +generated individually. + +For **Python**, **Dot**, and **Flatdata** output, all types are emitted +monolithically. 
+ +### Example + +``` +schema/ +├── types.flatdata +└── main.flatdata +``` + +```cpp +// types.flatdata +namespace geo { + struct Point { + x : u32 : 32; + y : u32 : 32; + } +} +``` + +```cpp +// main.flatdata +import "types.flatdata"; +namespace app { + archive Locations { + points : vector< .geo.Point >; + } +} +``` + +Generate each file separately: + +```sh +flatdata-generator -s schema/types.flatdata -g cpp -O schema/types.h +flatdata-generator -s schema/main.flatdata -g cpp -O schema/main.h +``` + +The generated `main.h` will contain `#include "types.h"` and only define the +`app::Locations` archive. + +### Rust Project Setup + +Each generated Rust file must live in its own module, with all imported schemas +as siblings under a common parent module: + +``` +my_crate/ +├── build.rs +└── src/ + └── schema/ + ├── mod.rs // pub mod types; pub mod main_schema; + ├── types.rs // include!(concat!(env!("OUT_DIR"), "/schema/types.rs")); + └── main_schema.rs // include!(concat!(env!("OUT_DIR"), "/schema/main.rs")); +``` + +The generated code uses `pub use super::...::module::namespace::*;` re-exports +to wire imported types through the module hierarchy. 
diff --git a/flatdata-cpp/cmake/flatdata/GenerateSource.cmake b/flatdata-cpp/cmake/flatdata/GenerateSource.cmake index 3974cf42..69a9ff5f 100644 --- a/flatdata-cpp/cmake/flatdata/GenerateSource.cmake +++ b/flatdata-cpp/cmake/flatdata/GenerateSource.cmake @@ -15,15 +15,23 @@ function(flatdata_generate_source TARGET_NAME SCHEMA_FILENAME OUTPUT_FILENAME) file(GLOB_RECURSE FLATDATA_GENERATOR_SOURCES ${FLATDATA_GENERATOR_PATH}/**/*.py) file(GLOB_RECURSE FLATDATA_GENERATOR_TEMPLATES ${FLATDATA_GENERATOR_PATH}/**/*.jinja2) + set(DEPFILE ${OUTPUT_FILENAME}.d) + set(DEPFILE_ARGS) + if(CMAKE_VERSION VERSION_GREATER_EQUAL "3.20") + set(DEPFILE_ARGS DEPFILE ${DEPFILE}) + endif() + add_custom_command( OUTPUT ${OUTPUT_FILENAME} COMMAND ${PYTHON3_EXECUTABLE} ${FLATDATA_GENERATOR_PATH}/generator.py --gen cpp --schema ${SCHEMA_FILENAME} --output-file ${OUTPUT_FILENAME} + --depfile ${DEPFILE} DEPENDS ${FLATDATA_GENERATOR_SOURCES} DEPENDS ${FLATDATA_GENERATOR_TEMPLATES} DEPENDS ${SCHEMA_FILENAME} + ${DEPFILE_ARGS} WORKING_DIRECTORY ${GENERATOR_PATH} COMMENT "Generating sources from flatdata schema" ) diff --git a/flatdata-cpp/test/CMakeLists.txt b/flatdata-cpp/test/CMakeLists.txt index b04604be..085667ed 100644 --- a/flatdata-cpp/test/CMakeLists.txt +++ b/flatdata-cpp/test/CMakeLists.txt @@ -12,8 +12,35 @@ flatdata_generate_source(generate_flatdata_test_case_ranges ${CMAKE_CURRENT_SOURCE_DIR}/../../test_cases/archives/ranges.flatdata ${CMAKE_CURRENT_BINARY_DIR}/generated/ranges.hpp) +# Import feature test cases: generate both imported and root schemas +# The root schema (main) produces #include "types.h", so both files must be +# in the same generated directory. 
+flatdata_generate_source(generate_import_simple_types + ${CMAKE_CURRENT_SOURCE_DIR}/../../test_cases/imports/simple/types.flatdata + ${CMAKE_CURRENT_BINARY_DIR}/generated/imports/simple/types.h) + +flatdata_generate_source(generate_import_simple_main + ${CMAKE_CURRENT_SOURCE_DIR}/../../test_cases/imports/simple/main.flatdata + ${CMAKE_CURRENT_BINARY_DIR}/generated/imports/simple/main.h) + +flatdata_generate_source(generate_import_cross_ns_other + ${CMAKE_CURRENT_SOURCE_DIR}/../../test_cases/imports/cross_namespace/other.flatdata + ${CMAKE_CURRENT_BINARY_DIR}/generated/imports/cross_namespace/other.h) + +flatdata_generate_source(generate_import_cross_ns_main + ${CMAKE_CURRENT_SOURCE_DIR}/../../test_cases/imports/cross_namespace/main.flatdata + ${CMAKE_CURRENT_BINARY_DIR}/generated/imports/cross_namespace/main.h) + add_executable(flatdata_test ${TEST_FLATDATA_SOURCES}) -add_dependencies(flatdata_test generate_flatdata_test_code generate_flatdata_test_code2 generate_flatdata_test_case_ranges) +add_dependencies(flatdata_test + generate_flatdata_test_code + generate_flatdata_test_code2 + generate_flatdata_test_case_ranges + generate_import_simple_types + generate_import_simple_main + generate_import_cross_ns_other + generate_import_cross_ns_main +) target_include_directories(flatdata_test PRIVATE ${Boost_INCLUDE_DIRS} diff --git a/flatdata-cpp/test/ImportTest.cpp b/flatdata-cpp/test/ImportTest.cpp new file mode 100644 index 00000000..bbb53ce1 --- /dev/null +++ b/flatdata-cpp/test/ImportTest.cpp @@ -0,0 +1,52 @@ +/** + * Copyright (c) 2025 HERE Europe B.V. + * See the LICENSE file in the root of this project for license details. + */ + +// Test that code generated from schemas with imports compiles and works correctly. +// The "simple" test case: main.flatdata imports types.flatdata +// main.h is generated with #include "types.h" and only defines the local archive. +// types.h defines the struct from the imported file. 
+#include "imports/simple/main.h" + +// The "cross_namespace" test case: main.flatdata imports other.flatdata (different namespace) +#include "imports/cross_namespace/main.h" + +#include <flatdata/flatdata.h> +#include "catch_amalgamated.hpp" + +TEST_CASE( "imported_types_are_usable_in_archive", "[Import]" ) +{ + std::shared_ptr< flatdata::ResourceStorage > storage + = flatdata::MemoryResourceStorage::create( ); + auto builder = app::ABuilder::open( storage ); + REQUIRE( builder.is_open( ) ); + + auto data = builder.start_data( ); + data.grow( ).x = 42; + data.grow( ).y = 100; + data.close( ); + + auto archive = app::A::open( storage ); + REQUIRE( archive.data( ).size( ) == 2 ); + REQUIRE( archive.data( )[ 0 ].x == 42 ); + REQUIRE( archive.data( )[ 1 ].y == 100 ); +} + +TEST_CASE( "cross_namespace_imported_enum_works", "[Import]" ) +{ + std::shared_ptr< flatdata::ResourceStorage > storage + = flatdata::MemoryResourceStorage::create( ); + auto builder = app::MainBuilder::open( storage ); + REQUIRE( builder.is_open( ) ); + + auto entries = builder.start_entries( ); + entries.grow( ).id = 7; + entries.grow( ).kind = ::defs::Kind::B; + entries.close( ); + + auto archive = app::Main::open( storage ); + REQUIRE( archive.entries( ).size( ) == 2 ); + REQUIRE( archive.entries( )[ 0 ].id == 7 ); + REQUIRE( archive.entries( )[ 1 ].kind == ::defs::Kind::B ); +} diff --git a/flatdata-generator/README.md b/flatdata-generator/README.md index 75ea43e6..779843e8 100644 --- a/flatdata-generator/README.md +++ b/flatdata-generator/README.md @@ -16,6 +16,26 @@ pip3 install flatdata-generator flatdata-generator -s locations.flatdata -g cpp -O locations.hpp ``` +### Multi-file Schemas + +When a schema uses `import` statements, each file should be generated +separately. 
Imported types are referenced via include/import directives rather +than being re-emitted: + +```sh +# Generate shared types +flatdata-generator -s schema/types.flatdata -g cpp -O schema/types.h + +# Generate main schema (will #include "types.h") +flatdata-generator -s schema/main.flatdata -g cpp -O schema/main.h +``` + +For Rust, the same approach applies — each imported file becomes its own module +with `pub use` re-exports connecting the namespaces. + +Python and Dot generators emit all types monolithically (no separate generation +needed for the root file — all imported definitions are included in the output). + Currently supported target languages: * C++ @@ -30,9 +50,14 @@ Currently supported target languages: The `flatdata` generator works in several stages which are clearly separated from one another and can be extended/tested in isolation: -1. **Parse the source schema** file using `pyparsing` library. Grammar +1. **Resolve imports** starting from the root schema file. The importer + (`importer.py`) performs a depth-first traversal of import statements, + deduplicating files and handling cyclic imports. The result is an ordered + list of resolved files with their parsed content. + +2. **Parse the source schema** file using `pyparsing` library. Grammar for the schema is defined in `grammar.py` -2. **Construct a node tree** out of `pyparsing.ParseResults`. The node tree +3. **Construct a node tree** out of `pyparsing.ParseResults`. The node tree contains entities for every construct of flatdata grammar, organized in hierarchical order, allowing non-tree references between nodes: @@ -49,7 +74,7 @@ The `flatdata` generator works in several stages which are clearly separated fro - `TypeReference` - model type dependencies, which are used during topological sorting at a later stage and for schema resolution. -3. **Augment the tree** with structures and references that are not +4. 
**Augment the tree** with structures and references that are not directly corresponding to `pyparsing.ParseResults` or needed to implement advanced features. Among these: @@ -59,17 +84,17 @@ The `flatdata` generator works in several stages which are clearly separated fro - **Add constant references** to all archives so that constants are available for schema resolution. -4. **Resolve references** iterates through all references and tries to +5. **Resolve references** iterates through all references and tries to find a node they refer to, either in: - Parent scopes until (inclusive) innermost parent namespace. - Root node if path is fully qualified. -5. **Perform topological sorting** to detect cycles in between entities +6. **Perform topological sorting** to detect cycles in between entities and to determine the order of serialization for targets that depend on one. -6. **Generate the source code** using nodes in topological order *and/or* +7. **Generate the source code** using nodes in topological order *and/or* the tree (depending on the generator architecture - recursive descent or iterative). @@ -87,6 +112,11 @@ Node tree enforces several properties of the flatdata schema: participate in topological sorting of the DAG formed by the tree edges and edges between source and target of a `TypeReference` +When building a tree from multiple files, each node is tagged with its +`source_file` (the file it was defined in) and an `is_local` flag +(whether it belongs to the root file being generated). This allows +generators to filter nodes for separate compilation. 
+ ### References Reference names are mangled so they are not ambiguous with other paths diff --git a/flatdata-generator/flatdata/generator/app.py b/flatdata-generator/flatdata/generator/app.py index 4e13bfd8..35e3cc1d 100755 --- a/flatdata-generator/flatdata/generator/app.py +++ b/flatdata-generator/flatdata/generator/app.py @@ -19,6 +19,7 @@ from flatdata.generator.engine import Engine from flatdata.generator.tree.errors import FlatdataSyntaxError +from flatdata.generator.tree.syntax_tree import SyntaxTree def _parse_command_line() -> argparse.Namespace: @@ -32,6 +33,8 @@ def _parse_command_line() -> argparse.Namespace: parser.add_argument("-O", "--output-file", type=str, required=True, default=None, help="Destination file. Forces all output to be stored in one file") + parser.add_argument("-d", "--depfile", type=str, default=None, + help="Write a Makefile-style dependency file listing all imported schemas") parser.add_argument("-v", "--verbose", action="store_true", help="Enable verbose mode") parser.add_argument("--debug", action="store_true", @@ -62,14 +65,12 @@ def _run(args: argparse.Namespace) -> None: _setup_logging(args) _check_args(args) - with open(args.schema, 'r') as input_file: - schema = input_file.read() - try: - engine = Engine(schema) - logging.debug("Tree: %s", engine.tree) - except FlatdataSyntaxError as ex: - logging.fatal("Error reading schema: %s ", ex) - sys.exit(1) + try: + engine = Engine.from_file(args.schema) + logging.debug("Tree: %s", engine.tree) + except FlatdataSyntaxError as ex: + logging.fatal("Error reading schema: %s ", ex) + sys.exit(1) try: logging.info("Generating %s...", args.gen) @@ -85,6 +86,26 @@ def _run(args: argparse.Namespace) -> None: output.write(output_content) logging.info("Code for %s is written to %s", args.gen, args.output_file) + if args.depfile: + _write_depfile(args.depfile, args.output_file, args.schema, engine.tree) + + +def _write_depfile(depfile_path: str, output_file: str, schema_file: str, + tree: 
'SyntaxTree') -> None: + """Write a Makefile-style depfile listing all schema dependencies.""" + deps = [os.path.abspath(schema_file)] + # source_file_map keys are absolute paths of all imported files + deps.extend(sorted(tree.source_file_map.keys())) + + # Escape spaces in paths for Make syntax + def escape(p: str) -> str: + return p.replace(" ", "\\ ") + + dep_str = " ".join(escape(d) for d in deps) + with open(depfile_path, "w") as f: + f.write(f"{escape(output_file)}: {dep_str}\n") + logging.info("Depfile written to %s", depfile_path) + def main() -> None: """Entrypoint""" diff --git a/flatdata-generator/flatdata/generator/engine.py b/flatdata-generator/flatdata/generator/engine.py index c5e43caa..fdac6acb 100644 --- a/flatdata-generator/flatdata/generator/engine.py +++ b/flatdata-generator/flatdata/generator/engine.py @@ -6,7 +6,7 @@ import types from typing import overload -from flatdata.generator.tree.builder import build_ast +from flatdata.generator.tree.builder import build_ast, build_ast_from_file from flatdata.generator.tree.nodes.trivial.namespace import Namespace from flatdata.generator.tree.nodes.node import Node from flatdata.generator.tree.syntax_tree import SyntaxTree @@ -40,6 +40,17 @@ def available_generators(cls) -> list[str]: """ return list(cls._GENERATORS.keys()) + @classmethod + def from_file(cls, path: str) -> 'Engine': + """ + Create Engine from a schema file, resolving imports. + :raises FlatdataSyntaxError + """ + engine = cls.__new__(cls) + engine.tree = build_ast_from_file(path) + engine.schema = engine.tree.root_schema or "" + return engine + def __init__(self, schema: str) -> None: """ Instantiates generator engine for a given schema. 
diff --git a/flatdata-generator/flatdata/generator/generators/__init__.py b/flatdata-generator/flatdata/generator/generators/__init__.py index b4439375..6ea11ef2 100644 --- a/flatdata-generator/flatdata/generator/generators/__init__.py +++ b/flatdata-generator/flatdata/generator/generators/__init__.py @@ -13,8 +13,9 @@ from jinja2.parser import Parser from flatdata.generator.tree.nodes.archive import Archive +from flatdata.generator.tree.nodes.node import Node from flatdata.generator.tree.nodes.trivial import Structure, Enumeration, Constant, Namespace -from flatdata.generator.tree.nodes.references import InvalidValueReference, EnumerationReference +from flatdata.generator.tree.nodes.references import EnumerationReference from flatdata.generator.tree.nodes.resources import ResourceBase, BoundResource, Archive as \ ArchiveResource, Vector, Multivector, Instance, RawData from flatdata.generator.tree.syntax_tree import SyntaxTree @@ -34,10 +35,18 @@ def supported_nodes(self) -> list[type]: "Derived generators must implement _supported_nodes") @abstractmethod - def _populate_environment(self, env: Environment) -> None: + def _populate_environment(self, env: Environment, tree: SyntaxTree) -> None: raise RuntimeError( "Derived generators must implement _populate_filters") + def filter_nodes(self, nodes: list[Node], tree: SyntaxTree) -> list[Node]: + """Filter nodes for rendering. Override for separate compilation.""" + return nodes + + def get_import_directives(self, tree: SyntaxTree) -> list[str]: + """Return language-specific import directives. 
Override in subclasses.""" + return [] + def render(self, tree: SyntaxTree) -> str: """Generate the language implementation from the AST""" env = Environment(loader=PackageLoader('flatdata.generator', 'templates'), lstrip_blocks=True, @@ -61,12 +70,14 @@ def render(self, tree: SyntaxTree) -> str: n, Structure) and "_builtin.multivector" in SyntaxTree.namespace_path(n)) env.filters['namespaces'] = SyntaxTree.namespaces env.filters['not_auto_generated'] = lambda n: [ x for x in n if not x.auto_generated] - self._populate_environment(env) + self._populate_environment(env, tree) template = env.get_template(self._template) flatdata_nodes = [n for n, _ in DfsTraversal(tree).dependency_order() if any([isinstance(n, t) for t in self.supported_nodes()])] - return template.render(nodes=flatdata_nodes, tree=tree) + filtered_nodes = self.filter_nodes(flatdata_nodes, tree) + imports = self.get_import_directives(tree) + return template.render(nodes=filtered_nodes, tree=tree, imports=imports) class RaiseExtension(Extension): diff --git a/flatdata-generator/flatdata/generator/generators/cpp.py b/flatdata-generator/flatdata/generator/generators/cpp.py index 3edce7fd..ea41dcab 100644 --- a/flatdata-generator/flatdata/generator/generators/cpp.py +++ b/flatdata-generator/flatdata/generator/generators/cpp.py @@ -3,6 +3,8 @@ See the LICENSE file in the root of this project for license details. ''' +import posixpath + from jinja2 import Environment from flatdata.generator.tree.helpers.basictype import BasicType @@ -13,6 +15,7 @@ ResourceBase, Archive as ArchiveResource from flatdata.generator.tree.nodes.trivial import Structure, Enumeration, Constant, Field from flatdata.generator.tree.nodes.archive import Archive +from flatdata.generator.tree.syntax_tree import SyntaxTree from . 
import BaseGenerator @@ -25,7 +28,15 @@ def __init__(self) -> None: def supported_nodes(self) -> list[type]: return [Structure, Archive, Constant, Enumeration] - def _populate_environment(self, env: Environment) -> None: + def filter_nodes(self, nodes: list[Node], tree: SyntaxTree) -> list[Node]: + if not tree.imports: + return nodes + return [n for n in nodes if tree.is_local_node(n)] + + def get_import_directives(self, tree: SyntaxTree) -> list[str]: + return [posixpath.normpath(imp.path).replace('.flatdata', '.h') for imp in tree.imports] + + def _populate_environment(self, env: Environment, tree: SyntaxTree) -> None: env.filters["cpp_doc"] = lambda value: value def _safe_cpp_string_line(value: str) -> str: diff --git a/flatdata-generator/flatdata/generator/generators/dot.py b/flatdata-generator/flatdata/generator/generators/dot.py index de190bd1..a7ab00e1 100644 --- a/flatdata-generator/flatdata/generator/generators/dot.py +++ b/flatdata-generator/flatdata/generator/generators/dot.py @@ -5,6 +5,7 @@ from flatdata.generator.tree.nodes.archive import Archive from flatdata.generator.tree.nodes.trivial import Field +from flatdata.generator.tree.syntax_tree import SyntaxTree from . 
import BaseGenerator from jinja2 import Environment @@ -19,7 +20,7 @@ class DotGenerator(BaseGenerator): def __init__(self) -> None: BaseGenerator.__init__(self, "dot/dot.jinja2") - def _populate_environment(self, env: Environment) -> None: + def _populate_environment(self, env: Environment, tree: SyntaxTree) -> None: env.autoescape = True def _field_value_type(field: Field) -> str: diff --git a/flatdata-generator/flatdata/generator/generators/flatdata.py b/flatdata-generator/flatdata/generator/generators/flatdata.py index 8a522a3d..a089b2be 100644 --- a/flatdata-generator/flatdata/generator/generators/flatdata.py +++ b/flatdata-generator/flatdata/generator/generators/flatdata.py @@ -23,7 +23,7 @@ def __init__(self) -> None: def supported_nodes(self) -> list[type]: return [Structure, Archive, Constant, Enumeration] - def _populate_environment(self, env: Environment) -> None: + def _populate_environment(self, env: Environment, tree: SyntaxTree) -> None: def _is_builtin(node: Node) -> bool: for namespace in SyntaxTree.namespaces(node): if namespace.name == "_builtin": diff --git a/flatdata-generator/flatdata/generator/generators/python.py b/flatdata-generator/flatdata/generator/generators/python.py index 0f7b0c19..f19e732e 100644 --- a/flatdata-generator/flatdata/generator/generators/python.py +++ b/flatdata-generator/flatdata/generator/generators/python.py @@ -24,7 +24,7 @@ def __init__(self) -> None: def supported_nodes(self) -> list[type]: return [Structure, Archive] - def _populate_environment(self, env: Environment) -> None: + def _populate_environment(self, env: Environment, tree: SyntaxTree) -> None: def _decorate_archive_type(tree: SyntaxTree, value: Node) -> str: assert isinstance(value, Node) return str(tree.namespace_path(value, "_") + "_" + value.name) diff --git a/flatdata-generator/flatdata/generator/generators/rust.py b/flatdata-generator/flatdata/generator/generators/rust.py index 80c4fe00..4088f181 100644 --- 
a/flatdata-generator/flatdata/generator/generators/rust.py +++ b/flatdata-generator/flatdata/generator/generators/rust.py @@ -2,6 +2,7 @@ Copyright (c) 2018 HERE Europe B.V. See the LICENSE file in the root of this project for license details. ''' +import posixpath import re from jinja2 import Environment @@ -9,7 +10,7 @@ from flatdata.generator.tree.nodes.node import Node from flatdata.generator.tree.nodes.resources import (Vector, Multivector, Instance, RawData, BoundResource, Archive as ArchiveResource) -from flatdata.generator.tree.nodes.trivial import Structure, Constant, Enumeration, Field +from flatdata.generator.tree.nodes.trivial import Structure, Constant, Enumeration, Namespace, Field from flatdata.generator.tree.helpers.enumtype import EnumType from flatdata.generator.tree.nodes.archive import Archive from flatdata.generator.tree.syntax_tree import SyntaxTree @@ -32,6 +33,59 @@ def __init__(self) -> None: def supported_nodes(self) -> list[type]: return [Structure, Archive, Constant, Enumeration] + def filter_nodes(self, nodes: list[Node], tree: SyntaxTree) -> list[Node]: + # Rust template traverses tree.root.children directly, not the nodes + # list. Filtering is handled in the template via tree.is_local_node(). 
+ return nodes + + @staticmethod + def _import_reexports_for_namespace(ns: Node, tree: SyntaxTree) -> list[str]: + """Return Rust pub use directives for imported types in a namespace.""" + if not tree.imports: + return [] + # Collect source files of non-local direct children + import_sources: set[str] = set() + for child in ns.children: + if not isinstance(child, Namespace) and not tree.is_local_node(child): + if child.source_file: + import_sources.add(child.source_file) + if not import_sources: + return [] + # Build namespace path (e.g., "a::b::c") + ns_parts: list[str] = [] + current: Node | None = ns + while current is not None and current.parent is not None: + ns_parts.append(current.name) + current = current.parent + ns_parts.reverse() + ns_path = "::".join(ns_parts) + # Map source files to module paths via source_file_map + reexports: list[str] = [] + seen_modules: set[str] = set() + for source_abs in import_sources: + rel_path = tree.source_file_map.get(source_abs) + if rel_path is None: + continue + normalized = posixpath.normpath(rel_path).replace('.flatdata', '') + parts = normalized.split('/') + # Each leading ".." requires an extra super:: to go up + # one more level in the module tree + dotdot_count = 0 + while dotdot_count < len(parts) and parts[dotdot_count] == '..': + dotdot_count += 1 + remaining = parts[dotdot_count:] + # super:: count: + # len(ns_parts) to escape the namespace module nesting + # + 1 to go from file-level module to its parent (sibling access) + # + dotdot_count for each ".." 
directory traversal + super_prefix = "::".join(["super"] * (len(ns_parts) + 1 + dotdot_count)) + module_path = "::".join(remaining) + full_path = f"{super_prefix}::{module_path}" + if full_path not in seen_modules: + seen_modules.add(full_path) + reexports.append(f"pub use {full_path}::{ns_path}::*;") + return reexports + @staticmethod def _format_numeric_literal(value: str) -> str: try: @@ -43,7 +97,9 @@ def _format_numeric_literal(value: str) -> str: except ValueError: return value - def _populate_environment(self, env: Environment) -> None: + def _populate_environment(self, env: Environment, tree: SyntaxTree) -> None: + env.globals["import_reexports_for_namespace"] = lambda ns: self._import_reexports_for_namespace(ns, tree) + def _camel_to_snake_case(expr: str) -> str: step1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', expr) return re.sub('([a-z0-9])(A-Z)', r'\1_\2', step1).lower() diff --git a/flatdata-generator/flatdata/generator/grammar.py b/flatdata-generator/flatdata/generator/grammar.py index 37f5ac06..ef2c3c73 100644 --- a/flatdata-generator/flatdata/generator/grammar.py +++ b/flatdata-generator/flatdata/generator/grammar.py @@ -9,7 +9,7 @@ Word, alphas, alphanums, nums, cppStyleComment, Keyword, Group, Optional, Or, OneOrMore, delimitedList, ZeroOrMore, hexnums, Combine, FollowedBy, ParseException as pyparsingParseException, - ParseResults + ParseResults, QuotedString, Suppress, Literal ) ParseException = pyparsingParseException @@ -194,9 +194,8 @@ def _combine_list(t: ParseResults) -> str: comment.setResultsName("comment", listAllMatches=True) ) -free_comments = Optional(OneOrMore(comment)("comment")) - namespace = Group( + Optional(comment)("doc") + Keyword("namespace") + qualified_identifier("name") + "{" + @@ -204,6 +203,19 @@ def _combine_list(t: ParseResults) -> str: Optional(comment) ) -flatdata_grammar = Group(free_comments + - OneOrMore(namespace)("namespace") - )("flatdata") +import_statement = Group( + Optional(comment)("doc") + + 
Suppress(Keyword("import")) + + QuotedString('"')("path") + + Suppress(Literal(';')) +) + +top_level_entry = ( + import_statement.setResultsName("imports", listAllMatches=True) | + namespace.setResultsName("namespace", listAllMatches=True) | + comment +) + +flatdata_grammar = Group( + ZeroOrMore(top_level_entry) +)("flatdata") diff --git a/flatdata-generator/flatdata/generator/templates/cpp/cpp.jinja2 b/flatdata-generator/flatdata/generator/templates/cpp/cpp.jinja2 index cc100d4a..19a774d7 100644 --- a/flatdata-generator/flatdata/generator/templates/cpp/cpp.jinja2 +++ b/flatdata-generator/flatdata/generator/templates/cpp/cpp.jinja2 @@ -7,6 +7,9 @@ #pragma once #include +{% for include_path in imports %} +#include "{{ include_path }}" +{% endfor %} #include #include #include diff --git a/flatdata-generator/flatdata/generator/templates/rust/rust.jinja2 b/flatdata-generator/flatdata/generator/templates/rust/rust.jinja2 index 90277125..da1377e1 100644 --- a/flatdata-generator/flatdata/generator/templates/rust/rust.jinja2 +++ b/flatdata-generator/flatdata/generator/templates/rust/rust.jinja2 @@ -16,9 +16,13 @@ pub mod {{ ns.name }} { #[allow(unused_imports)] use flatdata::{flatdata_read_bytes, flatdata_write_bytes}; +{% for reexport in import_reexports_for_namespace(ns) %} +{{ reexport }} +{% endfor %} {{ loop(ns.children) }} {%- for node in ns.children %} + {% if not tree.imports or tree.is_local_node(node) %} {% if node | is_structure and not node | is_multivector_index %} {{- structure.declaration(node) }} {% elif node | is_structure and node | is_multivector_index %} @@ -30,11 +34,12 @@ use flatdata::{flatdata_read_bytes, flatdata_write_bytes}; {% elif node | is_constant %} {{- constant.declaration(node) }} {% endif %} + {% endif %} {% endfor %} #[doc(hidden)] pub mod schema { -{% for node in ns.children if node | is_archive %} +{% for node in ns.children if node | is_archive and (not tree.imports or tree.is_local_node(node)) %} {% set archive_ns = node.name | 
camel_to_snake_case %} pub mod {{ archive_ns }} { diff --git a/flatdata-generator/flatdata/generator/tree/builder.py b/flatdata-generator/flatdata/generator/tree/builder.py index 9d623d72..12b23fe6 100644 --- a/flatdata-generator/flatdata/generator/tree/builder.py +++ b/flatdata-generator/flatdata/generator/tree/builder.py @@ -3,6 +3,9 @@ See the LICENSE file in the root of this project for license details. ''' +import os +from typing import Any + from pyparsing import ParseException, ParseSyntaxException import flatdata.generator.tree.nodes.trivial as nodes @@ -10,7 +13,8 @@ from flatdata.generator.tree.errors import ( InvalidEnumWidthError, InvalidRangeName, InvalidRangeReference, InvalidConstReference, InvalidConstValueReference, DuplicateInvalidValueReference, - InvalidStructInExplicitReference, OptionalRange) + InvalidStructInExplicitReference, OptionalRange, ParsingError, ImportParsingError, + UnresolvedImportError) from flatdata.generator.tree.nodes.explicit_reference import ExplicitReference from flatdata.generator.tree.nodes.archive import Archive from flatdata.generator.tree.nodes.node import Node @@ -20,12 +24,12 @@ BuiltinStructureReference, ConstantReference, ConstantValueReference, EnumerationReference, StructureReference, InvalidValueReference) from flatdata.generator.tree.nodes.root import Root -from flatdata.generator.tree.errors import ParsingError from flatdata.generator.tree.traversal import DfsTraversal from flatdata.generator.tree.helpers.basictype import BasicType from flatdata.generator.tree.helpers.enumtype import EnumType from .resolver import resolve_references +from .importer import resolve_imports def _create_nested_namespaces(path: str) -> tuple[nodes.Namespace, nodes.Namespace]: @@ -91,6 +95,23 @@ def _build_node_tree(definition: str) -> Root: except (ParseException, ParseSyntaxException) as err: raise ParsingError(err) + if "imports" in parsed: + raise UnresolvedImportError() + + roots = _build_namespace_roots(parsed) + return 
_merge_roots(roots) + + +def _build_namespace_roots(parsed: Any, + source_file: str | None = None, + is_local: bool = True) -> list[nodes.Namespace]: + """ + Build per-namespace chain roots from a parsed grammar result. + + Returns a list of namespace chain roots (e.g. a -> b -> c for + namespace a.b.c { ... }). Each definition node and its descendants + are tagged with *source_file* and *is_local*. + """ roots: list[nodes.Namespace] = [] for namespace in parsed.namespace: @@ -106,12 +127,23 @@ def _build_node_tree(definition: str) -> Root: for collection, cls in parsed_items: for item in collection: - target_namespace.insert(cls.create(properties=item, # type: ignore[attr-defined] # subclasses (Structure, Enumeration, Archive) define create() - definition=definition)) + node = cls.create(properties=item) # type: ignore[attr-defined] # subclasses define create() + _tag_node_tree(node, source_file=source_file, + is_local=is_local) + target_namespace.insert(node) roots.append(root_namespace) - return _merge_roots(roots) + return roots + + +def _tag_node_tree(node: Node, source_file: str | None, + is_local: bool) -> None: + """Set source_file and is_local on a node and all its descendants.""" + for descendant in node.iterate(): + descendant.source_file = source_file + descendant.is_local = is_local + def _append_builtin_structures(root: Root) -> None: @@ -122,9 +154,14 @@ def _append_builtin_structures(root: Root) -> None: for builtin in node.builtins: found = namespace.get_relative(builtin.name) if found is None: + _tag_node_tree(builtin, source_file=node.source_file, + is_local=node.is_local) namespace.insert(builtin) found = namespace.find_relative(builtin.name) - node.insert(BuiltinStructureReference(name=found.path)) + ref = BuiltinStructureReference(name=found.path) + ref.source_file = node.source_file + ref.is_local = node.is_local + node.insert(ref) def _append_constant_references(root: Root) -> None: @@ -134,7 +171,10 @@ def _append_constant_references(root: 
Root) -> None: for archive in archives: for constant in constants: if not constant.path in constant_references: - archive.insert(ConstantValueReference(constant.path)) + ref = ConstantValueReference(constant.path) + ref.source_file = archive.source_file + ref.is_local = archive.is_local + archive.insert(ref) def _update_field_type_references(root: Root) -> None: @@ -221,17 +261,56 @@ def _check_explicit_references(root: Root) -> None: if not ref.target in [x.target for x in reference.parent.children_like(StructureReference)]: raise InvalidStructInExplicitReference(ref.node.name, reference.parent.name) -def build_ast(definition: str) -> SyntaxTree: - """Build the Flatdata syntax tree from a definition""" - root = _build_node_tree(definition=definition) +def _run_pipeline(root: Root) -> None: + """Run the post-merge AST pipeline (builtin expansion, resolution, validation).""" _append_builtin_structures(root) resolve_references(root) _append_constant_references(root) _check_ranges(root) - # now compute data based on resolved references _update_field_type_references(root) _compute_structure_sizes(root) _compute_max_resource_size(root) _check_const_refs(root) _check_explicit_references(root) + + +def build_ast(definition: str) -> SyntaxTree: + """Build the Flatdata syntax tree from a schema string.""" + root = _build_node_tree(definition=definition) + _run_pipeline(root) return SyntaxTree(root) + + +def build_ast_from_file(path: str) -> SyntaxTree: + """Build the Flatdata syntax tree from a schema file, resolving imports.""" + try: + resolved_files, import_infos = resolve_imports(path) + except ImportParsingError as e: + if e.referenced_from is None: + raise ParsingError(e.pyparsing_error) from e + raise + + all_namespace_roots: list[nodes.Namespace] = [] + root_abs_path = os.path.realpath(path) + root_dir = os.path.dirname(root_abs_path) + root_content: str | None = None + + # Build mapping from abs_path to relative path for all imported files + source_file_map: 
dict[str, str] = {} + for resolved_file in resolved_files: + is_root = resolved_file.abs_path == root_abs_path + if is_root: + root_content = resolved_file.content + else: + rel_path = os.path.relpath(resolved_file.abs_path, root_dir).replace(os.sep, '/') + source_file_map[resolved_file.abs_path] = rel_path + file_roots = _build_namespace_roots( + resolved_file.parsed, source_file=resolved_file.abs_path, + is_local=is_root) + all_namespace_roots.extend(file_roots) + + root = _merge_roots(all_namespace_roots) + _run_pipeline(root) + + return SyntaxTree(root, imports=import_infos, root_schema=root_content, + source_file_map=source_file_map) diff --git a/flatdata-generator/flatdata/generator/tree/errors.py b/flatdata-generator/flatdata/generator/tree/errors.py index d0a392db..83cac467 100644 --- a/flatdata-generator/flatdata/generator/tree/errors.py +++ b/flatdata-generator/flatdata/generator/tree/errors.py @@ -26,6 +26,13 @@ def __init__(self, duplicate: Node, existing: Node) -> None: existing=existing)) +class UnresolvedImportError(FlatdataSyntaxError): + def __init__(self) -> None: + super().__init__( + "Import statements found in schema string. 
" + "Use Engine.from_file() or build_ast_from_file() to resolve imports.") + + class CircularReferencing(FlatdataSyntaxError): def __init__(self, node: Node, child: Node) -> None: super().__init__( @@ -164,4 +171,29 @@ class OptionalRange(FlatdataSyntaxError): def __init__(self, name: str) -> None: super().__init__( "@range cannot be combined with @optional, store empty ranges instead: {name}" - .format(name=name)) \ No newline at end of file + .format(name=name)) + + +class ImportFileNotFoundError(FlatdataSyntaxError): + def __init__(self, path: str, referenced_from: str) -> None: + super().__init__( + "Imported file not found: \"{path}\" (referenced from {referenced_from})" + .format(path=path, referenced_from=referenced_from)) + + +class ImportParsingError(FlatdataSyntaxError): + def __init__(self, file_path: str, pyparsing_error: ParseBaseException, + referenced_from: str | None = None) -> None: + self.referenced_from = referenced_from + self.pyparsing_error = pyparsing_error + context = " (imported from {ref})".format(ref=referenced_from) if referenced_from else "" + super().__init__( + "Failed to parse {path}{context}. Details below:\n" + " {line}\n" + " {pointer}\n" + " {message}".format( + path=file_path, + context=context, + line=pyparsing_error.line, + pointer=" " * (pyparsing_error.column - 1) + "^", + message=str(pyparsing_error))) diff --git a/flatdata-generator/flatdata/generator/tree/importer.py b/flatdata-generator/flatdata/generator/tree/importer.py new file mode 100644 index 00000000..6a95d0ce --- /dev/null +++ b/flatdata-generator/flatdata/generator/tree/importer.py @@ -0,0 +1,127 @@ +''' + Copyright (c) 2025 HERE Europe B.V. + See the LICENSE file in the root of this project for license details. 
+''' + +from __future__ import annotations + +import logging +import os +from dataclasses import dataclass, field +from typing import Any + +from pyparsing import ParseBaseException + +from ..grammar import flatdata_grammar +from .errors import ImportFileNotFoundError, ImportParsingError + +logger = logging.getLogger(__name__) + + +@dataclass +class ImportInfo: + """Metadata about an import directive.""" + path: str # original import path as written in the schema + abs_path: str # canonical absolute path of the imported file + + +@dataclass +class ResolvedFile: + """A schema file with its imports resolved.""" + abs_path: str # canonical absolute path + content: str # raw file content + imports: list[ImportInfo] # direct imports from this file + parsed: Any = field(repr=False) # cached pyparsing result for builder reuse + + +def resolve_imports(root_path: str) -> tuple[list[ResolvedFile], list[ImportInfo]]: + """ + Recursively resolve all imports starting from root_path. + + Each file is parsed exactly once with the flatdata grammar. The parse + result is cached in ``ResolvedFile.parsed`` so that downstream consumers + (e.g. the AST builder) do not need to re-parse. 
+ + Returns a tuple of: + - list of ResolvedFile in dependency-first order (each file appears exactly once) + - list of ImportInfo for direct imports from the root file + + Handles: + - Diamond imports: same file imported from multiple paths (deduplication via canonical paths) + - Cyclic imports: A imports B, B imports A (visited set prevents infinite recursion) + - Symlinks: resolved via os.path.realpath() to canonical target + + :raises ImportFileNotFoundError: if an imported file does not exist + """ + visited: set[str] = set() + result: list[ResolvedFile] = [] + root_imports: list[ImportInfo] = [] + + def _resolve(file_path: str, referenced_from: str | None) -> None: + canonical = os.path.realpath(file_path) + + if canonical in visited: + return + visited.add(canonical) + + if not os.path.isfile(canonical): + raise ImportFileNotFoundError( + path=file_path, + referenced_from=referenced_from or file_path + ) + + with open(canonical, 'r') as f: + content = f.read() + + try: + parsed = flatdata_grammar.parseString(content, parseAll=True)[0] + except ParseBaseException as e: + raise ImportParsingError( + file_path=canonical, + pyparsing_error=e, + referenced_from=referenced_from + ) + + # Extract import paths from the cached parse result + import_paths = ( + [imp["path"] for imp in parsed["imports"]] + if "imports" in parsed else [] + ) + base_dir = os.path.dirname(canonical) + + imports: list[ImportInfo] = [] + for imp_path in import_paths: + full_imp_path = os.path.join(base_dir, imp_path) + imp_canonical = os.path.realpath(full_imp_path) + + if not os.path.isfile(imp_canonical): + raise ImportFileNotFoundError( + path=imp_path, + referenced_from=canonical + ) + + imports.append(ImportInfo(path=imp_path, abs_path=imp_canonical)) + + if imp_canonical in visited: + logger.debug("Skipping already-visited import: %s (from %s)", + imp_path, canonical) + else: + _resolve(full_imp_path, referenced_from=canonical) + + result.append(ResolvedFile( + abs_path=canonical, + 
content=content, + imports=imports, + parsed=parsed, + )) + + root_canonical = os.path.realpath(root_path) + _resolve(root_path, referenced_from=None) + + # Extract root file's direct imports + for resolved in result: + if resolved.abs_path == root_canonical: + root_imports = resolved.imports + break + + return result, root_imports diff --git a/flatdata-generator/flatdata/generator/tree/nodes/archive.py b/flatdata-generator/flatdata/generator/tree/nodes/archive.py index 6a5ca176..171718b5 100644 --- a/flatdata-generator/flatdata/generator/tree/nodes/archive.py +++ b/flatdata-generator/flatdata/generator/tree/nodes/archive.py @@ -32,9 +32,8 @@ class Archive(Node): def __init__(self, name: str, properties: ParseResults | None = None) -> None: super().__init__(name=name, properties=properties) - #pylint: disable=unused-argument @staticmethod - def create(properties: ParseResults, definition: str) -> 'Archive': + def create(properties: ParseResults) -> 'Archive': result = Archive(name=properties.name, properties=properties) for resource in properties.resources: result.insert(_create_resource(resource)) diff --git a/flatdata-generator/flatdata/generator/tree/nodes/node.py b/flatdata-generator/flatdata/generator/tree/nodes/node.py index 7263e94e..f8624254 100644 --- a/flatdata-generator/flatdata/generator/tree/nodes/node.py +++ b/flatdata-generator/flatdata/generator/tree/nodes/node.py @@ -46,6 +46,26 @@ def __init__(self, name: str, properties: ParseResults | None = None) -> None: self._properties = properties self._children: OrderedDict[str, Node] = OrderedDict() self._parent: Node | None = None + self._source_file: str | None = None + self._is_local: bool = True + + @property + def source_file(self) -> str | None: + """Returns the source file path this node was defined in, or None.""" + return self._source_file + + @source_file.setter + def source_file(self, value: str | None) -> None: + self._source_file = value + + @property + def is_local(self) -> bool: + """True 
if this node was defined in the root compilation file.""" + return self._is_local + + @is_local.setter + def is_local(self, value: bool) -> None: + self._is_local = value @property def name(self) -> str: diff --git a/flatdata-generator/flatdata/generator/tree/nodes/resources/multivector.py b/flatdata-generator/flatdata/generator/tree/nodes/resources/multivector.py index 2857a4a0..7ad4afa7 100644 --- a/flatdata-generator/flatdata/generator/tree/nodes/resources/multivector.py +++ b/flatdata-generator/flatdata/generator/tree/nodes/resources/multivector.py @@ -52,5 +52,5 @@ def __getattr__(self, attr: str) -> Any: "schema":"struct IndexType%s { value : u64 : %s; }" % (self._width, self._width), "doc":"/** Builtin type to for MultiVector index */", "fields":[field]}) - index_type = Structure.create(properties=properties, definition="") # type: ignore[arg-type] # MemberDict duck-types ParseResults + index_type = Structure.create(properties=properties) # type: ignore[arg-type] # MemberDict duck-types ParseResults return [index_type] diff --git a/flatdata-generator/flatdata/generator/tree/nodes/trivial/constant.py b/flatdata-generator/flatdata/generator/tree/nodes/trivial/constant.py index 38ad45d7..03a33192 100644 --- a/flatdata-generator/flatdata/generator/tree/nodes/trivial/constant.py +++ b/flatdata-generator/flatdata/generator/tree/nodes/trivial/constant.py @@ -14,7 +14,7 @@ def __init__(self, name: str, properties: ParseResults | None = None) -> None: raise InvalidConstantValueError(name=name, value=self.value) @staticmethod - def create(properties: ParseResults, definition: str) -> 'Constant': + def create(properties: ParseResults) -> 'Constant': result = Constant(name=properties.name, properties=properties) return result diff --git a/flatdata-generator/flatdata/generator/tree/nodes/trivial/enumeration.py b/flatdata-generator/flatdata/generator/tree/nodes/trivial/enumeration.py index f0d88316..57da4ff8 100644 --- 
a/flatdata-generator/flatdata/generator/tree/nodes/trivial/enumeration.py +++ b/flatdata-generator/flatdata/generator/tree/nodes/trivial/enumeration.py @@ -15,7 +15,7 @@ def __init__(self, name: str, properties: ParseResults | None = None, type: str self._type = BasicType(name=type, width=width) @staticmethod - def create(properties: ParseResults, definition: str) -> 'Enumeration': + def create(properties: ParseResults) -> 'Enumeration': width = None if properties.width: width = int(properties.width) diff --git a/flatdata-generator/flatdata/generator/tree/nodes/trivial/structure.py b/flatdata-generator/flatdata/generator/tree/nodes/trivial/structure.py index 782b53c6..a5588518 100644 --- a/flatdata-generator/flatdata/generator/tree/nodes/trivial/structure.py +++ b/flatdata-generator/flatdata/generator/tree/nodes/trivial/structure.py @@ -16,7 +16,7 @@ def __init__(self, name: str, properties: ParseResults | None = None) -> None: super().__init__(name=name, properties=properties) @staticmethod - def create(properties: ParseResults, definition: str) -> 'Structure': + def create(properties: ParseResults) -> 'Structure': result = Structure(name=properties.name, properties=properties) for field in properties.fields: diff --git a/flatdata-generator/flatdata/generator/tree/syntax_tree.py b/flatdata-generator/flatdata/generator/tree/syntax_tree.py index 66938db7..81f9ebb4 100644 --- a/flatdata-generator/flatdata/generator/tree/syntax_tree.py +++ b/flatdata-generator/flatdata/generator/tree/syntax_tree.py @@ -13,6 +13,7 @@ from flatdata.generator.tree.nodes.node import Node from flatdata.generator.tree.nodes.references import ResourceReference from flatdata.generator.tree.nodes.root import Root +from flatdata.generator.tree.importer import ImportInfo class SyntaxTree: """ @@ -22,8 +23,33 @@ class SyntaxTree: - Schema resolution """ - def __init__(self, root: Root | Node) -> None: + def __init__(self, root: Root | Node, + imports: Sequence[ImportInfo] | None = None, + 
root_schema: str | None = None, + source_file_map: dict[str, str] | None = None) -> None: self._root = root + self._imports: Sequence[ImportInfo] = imports or [] + self._root_schema = root_schema + self._source_file_map: dict[str, str] = source_file_map or {} + + @property + def imports(self) -> Sequence[ImportInfo]: + """Returns the list of ImportInfo for direct imports of the root file.""" + return self._imports + + @property + def root_schema(self) -> str | None: + """Returns the raw schema text of the root file, or None for string-based builds.""" + return self._root_schema + + @property + def source_file_map(self) -> dict[str, str]: + """Returns mapping from absolute source file path to relative path for all imported files.""" + return self._source_file_map + + def is_local_node(self, node: Node) -> bool: + """True if node was defined in the root compilation file.""" + return node.is_local @property def root(self) -> Root | Node: diff --git a/flatdata-generator/tests/generators/cpp_expectations/imports/cross_namespace/main.h b/flatdata-generator/tests/generators/cpp_expectations/imports/cross_namespace/main.h new file mode 100644 index 00000000..19ff1cd4 --- /dev/null +++ b/flatdata-generator/tests/generators/cpp_expectations/imports/cross_namespace/main.h @@ -0,0 +1 @@ +#include "other.h" diff --git a/flatdata-generator/tests/generators/cpp_expectations/imports/cyclic/parent.h b/flatdata-generator/tests/generators/cpp_expectations/imports/cyclic/parent.h new file mode 100644 index 00000000..f8e4a505 --- /dev/null +++ b/flatdata-generator/tests/generators/cpp_expectations/imports/cyclic/parent.h @@ -0,0 +1 @@ +#include "child.h" diff --git a/flatdata-generator/tests/generators/cpp_expectations/imports/cyclic/parent.h.1 b/flatdata-generator/tests/generators/cpp_expectations/imports/cyclic/parent.h.1 new file mode 100644 index 00000000..9fb00912 --- /dev/null +++ b/flatdata-generator/tests/generators/cpp_expectations/imports/cyclic/parent.h.1 @@ -0,0 +1 @@ 
+class Parent : public flatdata::Archive diff --git a/flatdata-generator/tests/generators/cpp_expectations/imports/diamond/main.h b/flatdata-generator/tests/generators/cpp_expectations/imports/diamond/main.h new file mode 100644 index 00000000..600af314 --- /dev/null +++ b/flatdata-generator/tests/generators/cpp_expectations/imports/diamond/main.h @@ -0,0 +1,2 @@ +#include "a.h" +#include "b.h" diff --git a/flatdata-generator/tests/generators/cpp_expectations/imports/diamond/main.h.1 b/flatdata-generator/tests/generators/cpp_expectations/imports/diamond/main.h.1 new file mode 100644 index 00000000..de22b9e8 --- /dev/null +++ b/flatdata-generator/tests/generators/cpp_expectations/imports/diamond/main.h.1 @@ -0,0 +1 @@ +class Main : public flatdata::Archive diff --git a/flatdata-generator/tests/generators/cpp_expectations/imports/nested_path/main.h b/flatdata-generator/tests/generators/cpp_expectations/imports/nested_path/main.h new file mode 100644 index 00000000..77ed332f --- /dev/null +++ b/flatdata-generator/tests/generators/cpp_expectations/imports/nested_path/main.h @@ -0,0 +1 @@ +#include "sub/types.h" diff --git a/flatdata-generator/tests/generators/cpp_expectations/imports/nested_path/main.h.1 b/flatdata-generator/tests/generators/cpp_expectations/imports/nested_path/main.h.1 new file mode 100644 index 00000000..58042b4a --- /dev/null +++ b/flatdata-generator/tests/generators/cpp_expectations/imports/nested_path/main.h.1 @@ -0,0 +1 @@ +class A : public flatdata::Archive diff --git a/flatdata-generator/tests/generators/cpp_expectations/imports/simple/main.h b/flatdata-generator/tests/generators/cpp_expectations/imports/simple/main.h new file mode 100644 index 00000000..d3bf1f82 --- /dev/null +++ b/flatdata-generator/tests/generators/cpp_expectations/imports/simple/main.h @@ -0,0 +1 @@ +#include "types.h" \ No newline at end of file diff --git a/flatdata-generator/tests/generators/cpp_expectations/imports/simple/main.h.1 
b/flatdata-generator/tests/generators/cpp_expectations/imports/simple/main.h.1 new file mode 100644 index 00000000..0d98e1f8 --- /dev/null +++ b/flatdata-generator/tests/generators/cpp_expectations/imports/simple/main.h.1 @@ -0,0 +1 @@ +class A : public flatdata::Archive \ No newline at end of file diff --git a/flatdata-generator/tests/generators/cpp_expectations/imports/transitive/main.h b/flatdata-generator/tests/generators/cpp_expectations/imports/transitive/main.h new file mode 100644 index 00000000..65fc69c3 --- /dev/null +++ b/flatdata-generator/tests/generators/cpp_expectations/imports/transitive/main.h @@ -0,0 +1 @@ +#include "mid.h" diff --git a/flatdata-generator/tests/generators/cpp_expectations/imports/transitive/main.h.1 b/flatdata-generator/tests/generators/cpp_expectations/imports/transitive/main.h.1 new file mode 100644 index 00000000..58042b4a --- /dev/null +++ b/flatdata-generator/tests/generators/cpp_expectations/imports/transitive/main.h.1 @@ -0,0 +1 @@ +class A : public flatdata::Archive diff --git a/flatdata-generator/tests/generators/dot_expectations/imports/cross_namespace/main.dot b/flatdata-generator/tests/generators/dot_expectations/imports/cross_namespace/main.dot new file mode 100644 index 00000000..05f407d3 --- /dev/null +++ b/flatdata-generator/tests/generators/dot_expectations/imports/cross_namespace/main.dot @@ -0,0 +1,3 @@ +cluster__defs +{ + penwidth=0; diff --git a/flatdata-generator/tests/generators/dot_expectations/imports/cross_namespace/main.dot.1 b/flatdata-generator/tests/generators/dot_expectations/imports/cross_namespace/main.dot.1 new file mode 100644 index 00000000..588361dc --- /dev/null +++ b/flatdata-generator/tests/generators/dot_expectations/imports/cross_namespace/main.dot.1 @@ -0,0 +1,3 @@ +cluster__app +{ + penwidth=0; diff --git a/flatdata-generator/tests/generators/dot_expectations/imports/cyclic/parent.dot b/flatdata-generator/tests/generators/dot_expectations/imports/cyclic/parent.dot new file mode 100644 
index 00000000..f343db48 --- /dev/null +++ b/flatdata-generator/tests/generators/dot_expectations/imports/cyclic/parent.dot @@ -0,0 +1,3 @@ +cluster__m +{ + penwidth=0; diff --git a/flatdata-generator/tests/generators/dot_expectations/imports/diamond/main.dot b/flatdata-generator/tests/generators/dot_expectations/imports/diamond/main.dot new file mode 100644 index 00000000..d704e6ee --- /dev/null +++ b/flatdata-generator/tests/generators/dot_expectations/imports/diamond/main.dot @@ -0,0 +1,3 @@ +cluster__common +{ + penwidth=0; diff --git a/flatdata-generator/tests/generators/dot_expectations/imports/diamond/main.dot.1 b/flatdata-generator/tests/generators/dot_expectations/imports/diamond/main.dot.1 new file mode 100644 index 00000000..b19fd885 --- /dev/null +++ b/flatdata-generator/tests/generators/dot_expectations/imports/diamond/main.dot.1 @@ -0,0 +1,3 @@ +cluster__ext_a +{ + penwidth=0; diff --git a/flatdata-generator/tests/generators/dot_expectations/imports/diamond/main.dot.2 b/flatdata-generator/tests/generators/dot_expectations/imports/diamond/main.dot.2 new file mode 100644 index 00000000..f5d8a3b1 --- /dev/null +++ b/flatdata-generator/tests/generators/dot_expectations/imports/diamond/main.dot.2 @@ -0,0 +1,3 @@ +cluster__ext_b +{ + penwidth=0; diff --git a/flatdata-generator/tests/generators/dot_expectations/imports/nested_path/main.dot b/flatdata-generator/tests/generators/dot_expectations/imports/nested_path/main.dot new file mode 100644 index 00000000..5d7d657c --- /dev/null +++ b/flatdata-generator/tests/generators/dot_expectations/imports/nested_path/main.dot @@ -0,0 +1,3 @@ +cluster__geo +{ + penwidth=0; diff --git a/flatdata-generator/tests/generators/dot_expectations/imports/nested_path/main.dot.1 b/flatdata-generator/tests/generators/dot_expectations/imports/nested_path/main.dot.1 new file mode 100644 index 00000000..588361dc --- /dev/null +++ b/flatdata-generator/tests/generators/dot_expectations/imports/nested_path/main.dot.1 @@ -0,0 +1,3 @@ 
+cluster__app +{ + penwidth=0; diff --git a/flatdata-generator/tests/generators/dot_expectations/imports/simple/main.dot b/flatdata-generator/tests/generators/dot_expectations/imports/simple/main.dot new file mode 100644 index 00000000..c623fcd1 --- /dev/null +++ b/flatdata-generator/tests/generators/dot_expectations/imports/simple/main.dot @@ -0,0 +1,3 @@ +cluster__import_types +{ + penwidth=0; \ No newline at end of file diff --git a/flatdata-generator/tests/generators/dot_expectations/imports/simple/main.dot.1 b/flatdata-generator/tests/generators/dot_expectations/imports/simple/main.dot.1 new file mode 100644 index 00000000..df4b2dfe --- /dev/null +++ b/flatdata-generator/tests/generators/dot_expectations/imports/simple/main.dot.1 @@ -0,0 +1,3 @@ +cluster__app +{ + penwidth=0; \ No newline at end of file diff --git a/flatdata-generator/tests/generators/dot_expectations/imports/transitive/main.dot b/flatdata-generator/tests/generators/dot_expectations/imports/transitive/main.dot new file mode 100644 index 00000000..7a5bba23 --- /dev/null +++ b/flatdata-generator/tests/generators/dot_expectations/imports/transitive/main.dot @@ -0,0 +1,3 @@ +cluster__lib +{ + penwidth=0; diff --git a/flatdata-generator/tests/generators/dot_expectations/imports/transitive/main.dot.1 b/flatdata-generator/tests/generators/dot_expectations/imports/transitive/main.dot.1 new file mode 100644 index 00000000..588361dc --- /dev/null +++ b/flatdata-generator/tests/generators/dot_expectations/imports/transitive/main.dot.1 @@ -0,0 +1,3 @@ +cluster__app +{ + penwidth=0; diff --git a/flatdata-generator/tests/generators/flatdata_expectations/imports/cross_namespace/main.flatdata b/flatdata-generator/tests/generators/flatdata_expectations/imports/cross_namespace/main.flatdata new file mode 100644 index 00000000..d13dd380 --- /dev/null +++ b/flatdata-generator/tests/generators/flatdata_expectations/imports/cross_namespace/main.flatdata @@ -0,0 +1,8 @@ +namespace defs { +enum Kind : u8 : 8 +{ + A = 
0, + B = 1, + C = 2, +} +} diff --git a/flatdata-generator/tests/generators/flatdata_expectations/imports/cyclic/parent.flatdata b/flatdata-generator/tests/generators/flatdata_expectations/imports/cyclic/parent.flatdata new file mode 100644 index 00000000..b59b0632 --- /dev/null +++ b/flatdata-generator/tests/generators/flatdata_expectations/imports/cyclic/parent.flatdata @@ -0,0 +1,6 @@ +namespace m { +struct Item +{ + value : u64 : 64; +} +} diff --git a/flatdata-generator/tests/generators/flatdata_expectations/imports/diamond/main.flatdata b/flatdata-generator/tests/generators/flatdata_expectations/imports/diamond/main.flatdata new file mode 100644 index 00000000..0e30c00c --- /dev/null +++ b/flatdata-generator/tests/generators/flatdata_expectations/imports/diamond/main.flatdata @@ -0,0 +1,7 @@ +namespace common { +struct Point +{ + x : i32 : 32; + y : i32 : 32; +} +} diff --git a/flatdata-generator/tests/generators/flatdata_expectations/imports/diamond/main.flatdata.1 b/flatdata-generator/tests/generators/flatdata_expectations/imports/diamond/main.flatdata.1 new file mode 100644 index 00000000..469a7e14 --- /dev/null +++ b/flatdata-generator/tests/generators/flatdata_expectations/imports/diamond/main.flatdata.1 @@ -0,0 +1,7 @@ +namespace ext_a { +struct Label +{ + id : u32 : 32; + kind : .common.Color : 8; +} +} diff --git a/flatdata-generator/tests/generators/flatdata_expectations/imports/diamond/main.flatdata.2 b/flatdata-generator/tests/generators/flatdata_expectations/imports/diamond/main.flatdata.2 new file mode 100644 index 00000000..c7d0cf3a --- /dev/null +++ b/flatdata-generator/tests/generators/flatdata_expectations/imports/diamond/main.flatdata.2 @@ -0,0 +1,7 @@ +namespace ext_b { +struct Tag +{ + id : u32 : 32; + kind : .common.Color : 8; +} +} diff --git a/flatdata-generator/tests/generators/flatdata_expectations/imports/diamond/main.flatdata.3 b/flatdata-generator/tests/generators/flatdata_expectations/imports/diamond/main.flatdata.3 new file mode 
100644 index 00000000..e8f19714 --- /dev/null +++ b/flatdata-generator/tests/generators/flatdata_expectations/imports/diamond/main.flatdata.3 @@ -0,0 +1,2 @@ +namespace app { +archive Main diff --git a/flatdata-generator/tests/generators/flatdata_expectations/imports/nested_path/main.flatdata b/flatdata-generator/tests/generators/flatdata_expectations/imports/nested_path/main.flatdata new file mode 100644 index 00000000..e44e6b89 --- /dev/null +++ b/flatdata-generator/tests/generators/flatdata_expectations/imports/nested_path/main.flatdata @@ -0,0 +1,7 @@ +namespace geo { +struct Coord +{ + lat : i32 : 32; + lon : i32 : 32; +} +} diff --git a/flatdata-generator/tests/generators/flatdata_expectations/imports/simple/main.flatdata b/flatdata-generator/tests/generators/flatdata_expectations/imports/simple/main.flatdata new file mode 100644 index 00000000..6c8cf65f --- /dev/null +++ b/flatdata-generator/tests/generators/flatdata_expectations/imports/simple/main.flatdata @@ -0,0 +1,14 @@ +namespace import_types { +struct S +{ + x : u32 : 32; + y : u32 : 32; +} +} + +namespace app { +archive A +{ + data : vector< .import_types.S >; +} +} diff --git a/flatdata-generator/tests/generators/flatdata_expectations/imports/transitive/main.flatdata b/flatdata-generator/tests/generators/flatdata_expectations/imports/transitive/main.flatdata new file mode 100644 index 00000000..6dcfab7d --- /dev/null +++ b/flatdata-generator/tests/generators/flatdata_expectations/imports/transitive/main.flatdata @@ -0,0 +1,6 @@ +namespace lib { +struct Base +{ + value : u32 : 32; +} +} diff --git a/flatdata-generator/tests/generators/flatdata_expectations/imports/transitive/main.flatdata.1 b/flatdata-generator/tests/generators/flatdata_expectations/imports/transitive/main.flatdata.1 new file mode 100644 index 00000000..9da5bddd --- /dev/null +++ b/flatdata-generator/tests/generators/flatdata_expectations/imports/transitive/main.flatdata.1 @@ -0,0 +1,2 @@ +namespace app { +archive A diff --git 
a/flatdata-generator/tests/generators/py_expectations/imports/cross_namespace/main.py b/flatdata-generator/tests/generators/py_expectations/imports/cross_namespace/main.py new file mode 100644 index 00000000..0cc0ea2d --- /dev/null +++ b/flatdata-generator/tests/generators/py_expectations/imports/cross_namespace/main.py @@ -0,0 +1 @@ +class app_Main(flatdata.archive.Archive): diff --git a/flatdata-generator/tests/generators/py_expectations/imports/cyclic/parent.py b/flatdata-generator/tests/generators/py_expectations/imports/cyclic/parent.py new file mode 100644 index 00000000..84471ed8 --- /dev/null +++ b/flatdata-generator/tests/generators/py_expectations/imports/cyclic/parent.py @@ -0,0 +1 @@ +class m_Item(flatdata.structure.Structure): diff --git a/flatdata-generator/tests/generators/py_expectations/imports/diamond/main.py b/flatdata-generator/tests/generators/py_expectations/imports/diamond/main.py new file mode 100644 index 00000000..7c2b841a --- /dev/null +++ b/flatdata-generator/tests/generators/py_expectations/imports/diamond/main.py @@ -0,0 +1 @@ +class common_Point(flatdata.structure.Structure): diff --git a/flatdata-generator/tests/generators/py_expectations/imports/diamond/main.py.1 b/flatdata-generator/tests/generators/py_expectations/imports/diamond/main.py.1 new file mode 100644 index 00000000..6ee61c67 --- /dev/null +++ b/flatdata-generator/tests/generators/py_expectations/imports/diamond/main.py.1 @@ -0,0 +1 @@ +class ext_a_Label(flatdata.structure.Structure): diff --git a/flatdata-generator/tests/generators/py_expectations/imports/diamond/main.py.2 b/flatdata-generator/tests/generators/py_expectations/imports/diamond/main.py.2 new file mode 100644 index 00000000..0600c7c5 --- /dev/null +++ b/flatdata-generator/tests/generators/py_expectations/imports/diamond/main.py.2 @@ -0,0 +1 @@ +class ext_b_Tag(flatdata.structure.Structure): diff --git a/flatdata-generator/tests/generators/py_expectations/imports/diamond/main.py.3 
b/flatdata-generator/tests/generators/py_expectations/imports/diamond/main.py.3 new file mode 100644 index 00000000..0cc0ea2d --- /dev/null +++ b/flatdata-generator/tests/generators/py_expectations/imports/diamond/main.py.3 @@ -0,0 +1 @@ +class app_Main(flatdata.archive.Archive): diff --git a/flatdata-generator/tests/generators/py_expectations/imports/nested_path/main.py b/flatdata-generator/tests/generators/py_expectations/imports/nested_path/main.py new file mode 100644 index 00000000..e9d59d6e --- /dev/null +++ b/flatdata-generator/tests/generators/py_expectations/imports/nested_path/main.py @@ -0,0 +1 @@ +class geo_Coord(flatdata.structure.Structure): diff --git a/flatdata-generator/tests/generators/py_expectations/imports/simple/main.py b/flatdata-generator/tests/generators/py_expectations/imports/simple/main.py new file mode 100644 index 00000000..5129d71d --- /dev/null +++ b/flatdata-generator/tests/generators/py_expectations/imports/simple/main.py @@ -0,0 +1 @@ +class import_types_S(flatdata.structure.Structure): \ No newline at end of file diff --git a/flatdata-generator/tests/generators/py_expectations/imports/transitive/main.py b/flatdata-generator/tests/generators/py_expectations/imports/transitive/main.py new file mode 100644 index 00000000..44d039ee --- /dev/null +++ b/flatdata-generator/tests/generators/py_expectations/imports/transitive/main.py @@ -0,0 +1 @@ +class lib_Base(flatdata.structure.Structure): diff --git a/flatdata-generator/tests/generators/py_expectations/imports/transitive/main.py.1 b/flatdata-generator/tests/generators/py_expectations/imports/transitive/main.py.1 new file mode 100644 index 00000000..61e2f9ff --- /dev/null +++ b/flatdata-generator/tests/generators/py_expectations/imports/transitive/main.py.1 @@ -0,0 +1 @@ +class app_A(flatdata.archive.Archive): diff --git a/flatdata-generator/tests/generators/rust_expectations/imports/cross_namespace/main.rs.1 
b/flatdata-generator/tests/generators/rust_expectations/imports/cross_namespace/main.rs.1 new file mode 100644 index 00000000..499a7cf1 --- /dev/null +++ b/flatdata-generator/tests/generators/rust_expectations/imports/cross_namespace/main.rs.1 @@ -0,0 +1 @@ +pub use super::super::other::defs::*; diff --git a/flatdata-generator/tests/generators/rust_expectations/imports/cyclic/parent.rs.1 b/flatdata-generator/tests/generators/rust_expectations/imports/cyclic/parent.rs.1 new file mode 100644 index 00000000..8e6c9769 --- /dev/null +++ b/flatdata-generator/tests/generators/rust_expectations/imports/cyclic/parent.rs.1 @@ -0,0 +1 @@ +pub use super::super::child::m::*; diff --git a/flatdata-generator/tests/generators/rust_expectations/imports/diamond/main.rs.1 b/flatdata-generator/tests/generators/rust_expectations/imports/diamond/main.rs.1 new file mode 100644 index 00000000..077bdbf8 --- /dev/null +++ b/flatdata-generator/tests/generators/rust_expectations/imports/diamond/main.rs.1 @@ -0,0 +1 @@ +pub use super::super::common::common::*; diff --git a/flatdata-generator/tests/generators/rust_expectations/imports/diamond/main.rs.2 b/flatdata-generator/tests/generators/rust_expectations/imports/diamond/main.rs.2 new file mode 100644 index 00000000..fd9dc4b3 --- /dev/null +++ b/flatdata-generator/tests/generators/rust_expectations/imports/diamond/main.rs.2 @@ -0,0 +1 @@ +pub use super::super::a::ext_a::*; diff --git a/flatdata-generator/tests/generators/rust_expectations/imports/diamond/main.rs.3 b/flatdata-generator/tests/generators/rust_expectations/imports/diamond/main.rs.3 new file mode 100644 index 00000000..563b75ad --- /dev/null +++ b/flatdata-generator/tests/generators/rust_expectations/imports/diamond/main.rs.3 @@ -0,0 +1 @@ +pub use super::super::b::ext_b::*; diff --git a/flatdata-generator/tests/generators/rust_expectations/imports/nested_path/main.rs.1 b/flatdata-generator/tests/generators/rust_expectations/imports/nested_path/main.rs.1 new file mode 100644 index 
00000000..e3f5482c --- /dev/null +++ b/flatdata-generator/tests/generators/rust_expectations/imports/nested_path/main.rs.1 @@ -0,0 +1 @@ +pub use super::super::sub::types::geo::*; diff --git a/flatdata-generator/tests/generators/rust_expectations/imports/simple/main.rs.1 b/flatdata-generator/tests/generators/rust_expectations/imports/simple/main.rs.1 new file mode 100644 index 00000000..f3dfb585 --- /dev/null +++ b/flatdata-generator/tests/generators/rust_expectations/imports/simple/main.rs.1 @@ -0,0 +1 @@ +pub use super::super::types::import_types::*; \ No newline at end of file diff --git a/flatdata-generator/tests/generators/rust_expectations/imports/transitive/main.rs.1 b/flatdata-generator/tests/generators/rust_expectations/imports/transitive/main.rs.1 new file mode 100644 index 00000000..40b2967d --- /dev/null +++ b/flatdata-generator/tests/generators/rust_expectations/imports/transitive/main.rs.1 @@ -0,0 +1 @@ +pub use super::super::lib::lib::*; diff --git a/flatdata-generator/tests/generators/rust_expectations/imports/transitive/main.rs.2 b/flatdata-generator/tests/generators/rust_expectations/imports/transitive/main.rs.2 new file mode 100644 index 00000000..9c4fcbfc --- /dev/null +++ b/flatdata-generator/tests/generators/rust_expectations/imports/transitive/main.rs.2 @@ -0,0 +1 @@ +pub use super::super::mid::mid::*; diff --git a/flatdata-generator/tests/generators/schemas.py b/flatdata-generator/tests/generators/schemas.py index a7498f89..9387b0a1 100644 --- a/flatdata-generator/tests/generators/schemas.py +++ b/flatdata-generator/tests/generators/schemas.py @@ -12,7 +12,11 @@ def schemas_and_expectations(generator, extension): basedir = os.path.dirname(__file__) test_dir = os.path.normpath(os.path.join( basedir, '..', '..', '..', 'test_cases')) + imports_dir = os.path.join(test_dir, 'imports') for path, _subdirs, files in os.walk(test_dir): + # Skip multi-file import test cases (handled by test_import_generators.py) + if os.path.commonpath([path, 
imports_dir]) == imports_dir: + continue for name in files: if os.path.splitext(name)[1] == '.flatdata': relpath = os.path.relpath(path, test_dir) diff --git a/flatdata-generator/tests/generators/test_import_generators.py b/flatdata-generator/tests/generators/test_import_generators.py new file mode 100644 index 00000000..41a73191 --- /dev/null +++ b/flatdata-generator/tests/generators/test_import_generators.py @@ -0,0 +1,156 @@ +''' + Copyright (c) 2025 HERE Europe B.V. + See the LICENSE file in the root of this project for license details. +''' + +import glob +import os + +import pytest + +from flatdata.generator.engine import Engine +from .assertions import unify_whitespace, diff + + +# Map of generator name → (expectation dir suffix, file extension) +GENERATORS = { + 'cpp': ('cpp_expectations', 'h'), + 'rust': ('rust_expectations', 'rs'), + 'flatdata': ('flatdata_expectations', 'flatdata'), + 'dot': ('dot_expectations', 'dot'), + 'py': ('py_expectations', 'py'), +} + +BASEDIR = os.path.dirname(__file__) +TEST_DIR = os.path.normpath(os.path.join(BASEDIR, '..', '..', '..', 'test_cases', 'imports')) + + +def _discover_import_test_cases(): + """Discover import test case directories and their root schema files.""" + cases = [] + if not os.path.isdir(TEST_DIR): + return cases + for case_name in sorted(os.listdir(TEST_DIR)): + case_dir = os.path.join(TEST_DIR, case_name) + if not os.path.isdir(case_dir): + continue + # Find root schema: prefer main.flatdata, fall back to parent.flatdata (cyclic) + for root_name in ['main.flatdata', 'parent.flatdata']: + root_path = os.path.join(case_dir, root_name) + if os.path.exists(root_path): + root_stem = os.path.splitext(root_name)[0] + cases.append((case_name, root_path, root_stem)) + break + return cases + + +def _get_expectations(case_name, root_stem, generator_name): + """Load expectation files for a given test case and generator.""" + expect_dir, ext = GENERATORS[generator_name] + pattern = os.path.join( + BASEDIR, 
expect_dir, 'imports', case_name, root_stem + '.' + ext + '*') + expectations = [] + for path in sorted(glob.glob(pattern)): + with open(path, 'r') as f: + expectations.append(f.read()) + return expectations + + +def _get_test_params(): + """Generate (case_name, root_path, root_stem, generator_name) tuples.""" + params = [] + for case_name, root_path, root_stem in _discover_import_test_cases(): + for gen_name in GENERATORS: + expect = _get_expectations(case_name, root_stem, gen_name) + if expect: + params.append(pytest.param( + root_path, gen_name, expect, + id=f"{case_name}-{gen_name}")) + return params + + +@pytest.mark.parametrize("root_path,generator_name,expectations", _get_test_params()) +def test_import_against_expectations(root_path, generator_name, expectations): + """Test that import schemas generate output matching expectation snippets.""" + engine = Engine.from_file(root_path) + output = engine.render(generator_name) + output_unified = unify_whitespace(output) + + for expectation in expectations: + expectation_unified = unify_whitespace(expectation) + assert expectation_unified in output_unified, \ + "\nExpectation not found in output:\n========== DIFF ===========\n%s" % \ + diff(expectation, output) + + +def _get_generation_params(): + """All (case, generator) combos for smoke test — verify generation succeeds.""" + params = [] + for case_name, root_path, root_stem in _discover_import_test_cases(): + for gen_name in GENERATORS: + params.append(pytest.param( + root_path, gen_name, + id=f"{case_name}-{gen_name}")) + return params + + +@pytest.mark.parametrize("root_path,generator_name", _get_generation_params()) +def test_import_generation_succeeds(root_path, generator_name): + """Smoke test: all import schemas generate without errors for all backends.""" + engine = Engine.from_file(root_path) + output = engine.render(generator_name) + assert len(output) > 0 + + +class TestImportSeparateCompilation: + """Verify separate compilation behavior for C++ and 
Rust.""" + + @pytest.mark.parametrize("case_name,root_path,root_stem", + _discover_import_test_cases(), + ids=[c[0] for c in _discover_import_test_cases()]) + def test_cpp_no_imported_struct_definitions(self, case_name, root_path, root_stem): + """C++ output should not define structs from imported files.""" + engine = Engine.from_file(root_path) + tree = engine.tree + if not tree.imports: + pytest.skip("No imports in this test case") + + output = engine.render("cpp") + from flatdata.generator.tree.nodes.trivial import Structure + for struct in tree.root.iterate(Structure): + if not struct.is_local and "builtin" not in struct.path: + # C++ structs are generated as union {name}Template + assert f"{struct.name}Template" not in output, \ + f"Imported struct {struct.name} should not be defined in C++ output" + + @pytest.mark.parametrize("case_name,root_path,root_stem", + _discover_import_test_cases(), + ids=[c[0] for c in _discover_import_test_cases()]) + def test_rust_no_imported_struct_definitions(self, case_name, root_path, root_stem): + """Rust output should not define structs from imported files (outside schema strings).""" + engine = Engine.from_file(root_path) + tree = engine.tree + if not tree.imports: + pytest.skip("No imports in this test case") + + output = engine.render("rust") + # Split out embedded schema strings (between r#"schema( and )schema"#) + # to avoid false positives from schema definitions + import re + code_only = re.sub(r'r#"schema\(.*?\)schema"#', '', output, flags=re.DOTALL) + + from flatdata.generator.tree.nodes.trivial import Structure + for struct in tree.root.iterate(Structure): + if not struct.is_local and "builtin" not in struct.path: + assert f"pub struct {struct.name}" not in code_only, \ + f"Imported struct {struct.name} should not be defined in Rust output" + + @pytest.mark.parametrize("case_name,root_path,root_stem", + _discover_import_test_cases(), + ids=[c[0] for c in _discover_import_test_cases()]) + def 
test_flatdata_is_self_contained(self, case_name, root_path, root_stem): + """Generated flatdata output must not contain import statements.""" + engine = Engine.from_file(root_path) + output = engine.render("flatdata") + assert 'import "' not in output, \ + "Generated flatdata schema must be self-contained (no imports)" diff --git a/flatdata-generator/tests/test_engine.py b/flatdata-generator/tests/test_engine.py new file mode 100644 index 00000000..c324b133 --- /dev/null +++ b/flatdata-generator/tests/test_engine.py @@ -0,0 +1,364 @@ +''' + Copyright (c) 2025 HERE Europe B.V. + See the LICENSE file in the root of this project for license details. +''' + +import os + +import pytest + +from flatdata.generator.engine import Engine +from flatdata.generator.tree.errors import ( + FlatdataSyntaxError, ImportFileNotFoundError, ParsingError) +from flatdata.generator.tree.nodes.trivial import Structure +from flatdata.generator.tree.nodes.archive import Archive + + +def _write_files(tmpdir, files): + """Write a dict of {relative_path: content} into tmpdir.""" + for rel_path, content in files.items(): + full = os.path.join(tmpdir, rel_path) + os.makedirs(os.path.dirname(full), exist_ok=True) + with open(full, "w") as f: + f.write(content) + + +class TestEngineFromFile: + """Tests for Engine.from_file() with import support.""" + + def test_single_file(self, tmp_path): + _write_files(str(tmp_path), { + "main.flatdata": ''' +namespace n{ + struct S { f : u8 : 8; } + archive A { r : vector< S >; } +} +''' + }) + engine = Engine.from_file(str(tmp_path / "main.flatdata")) + assert engine.tree is not None + assert len(list(engine.tree.root.iterate(Archive))) == 1 + + def test_with_imports(self, tmp_path): + _write_files(str(tmp_path), { + "main.flatdata": ''' +import "types.flatdata"; +namespace n{ + archive A { r : vector< S >; } +} +''', + "types.flatdata": ''' +namespace n{ struct S { f : u8 : 8; } } +''' + }) + engine = Engine.from_file(str(tmp_path / "main.flatdata")) + structs 
= list(engine.tree.root.iterate(Structure)) + archives = list(engine.tree.root.iterate(Archive)) + assert any(s.name == "S" for s in structs) + assert any(a.name == "A" for a in archives) + + def test_schema_attribute_contains_root_file_content(self, tmp_path): + content = 'namespace n{ struct S { f : u8 : 8; } }' + _write_files(str(tmp_path), {"main.flatdata": content}) + engine = Engine.from_file(str(tmp_path / "main.flatdata")) + assert engine.schema == content + + def test_missing_import_raises(self, tmp_path): + _write_files(str(tmp_path), { + "main.flatdata": 'import "missing.flatdata"; namespace n{ struct S { f : u8 : 8; } }' + }) + with pytest.raises(ImportFileNotFoundError): + Engine.from_file(str(tmp_path / "main.flatdata")) + + def test_nonexistent_root_file_raises(self, tmp_path): + """Non-existent root file raises FlatdataSyntaxError, not FileNotFoundError.""" + with pytest.raises(FlatdataSyntaxError): + Engine.from_file(str(tmp_path / "nonexistent.flatdata")) + + def test_invalid_root_file_raises_parsing_error(self, tmp_path): + _write_files(str(tmp_path), { + "main.flatdata": 'this is not valid flatdata' + }) + with pytest.raises(ParsingError): + Engine.from_file(str(tmp_path / "main.flatdata")) + + def test_render_with_imports(self, tmp_path): + """Flatdata generator produces self-contained output with all types.""" + _write_files(str(tmp_path), { + "main.flatdata": ''' +import "types.flatdata"; +namespace n{ + archive A { r : vector< S >; } +} +''', + "types.flatdata": ''' +namespace n{ struct S { f : u8 : 8; } } +''' + }) + engine = Engine.from_file(str(tmp_path / "main.flatdata")) + output = engine.render("flatdata") + # Both local and imported types are emitted (monolithic) + assert "struct S" in output + assert "archive A" in output + # No import directives in output — schema must be self-contained + assert "import" not in output + + def test_schema_embedding_self_contained(self, tmp_path): + """Schema embedding includes all dependencies from 
imports.""" + _write_files(str(tmp_path), { + "main.flatdata": ''' +import "types.flatdata"; +namespace n{ + archive A { r : vector< S >; } +} +''', + "types.flatdata": ''' +namespace n{ struct S { f : u8 : 8; } } +''' + }) + engine = Engine.from_file(str(tmp_path / "main.flatdata")) + archive = next(engine.tree.root.iterate(Archive)) + schema = engine.tree.schema(archive) + assert "struct S" in schema + assert "archive A" in schema + assert "import" not in schema + + def test_imports_metadata_available(self, tmp_path): + _write_files(str(tmp_path), { + "main.flatdata": ''' +import "types.flatdata"; +namespace n{ archive A { r : vector< S >; } } +''', + "types.flatdata": ''' +namespace n{ struct S { f : u8 : 8; } } +''' + }) + engine = Engine.from_file(str(tmp_path / "main.flatdata")) + assert len(engine.tree.imports) == 1 + assert engine.tree.imports[0].path == "types.flatdata" + + def test_python_monolithic_with_imports(self, tmp_path): + """Python generator emits all types including imported ones.""" + _write_files(str(tmp_path), { + "main.flatdata": ''' +import "types.flatdata"; +namespace n{ archive A { r : vector< S >; } } +''', + "types.flatdata": ''' +namespace n{ struct S { f : u8 : 8; } } +''' + }) + engine = Engine.from_file(str(tmp_path / "main.flatdata")) + output = engine.render("py") + assert "n_S" in output + assert "n_A" in output + + def test_dot_monolithic_with_imports(self, tmp_path): + """Dot generator renders all types including imported ones.""" + _write_files(str(tmp_path), { + "main.flatdata": ''' +import "types.flatdata"; +namespace n{ archive A { r : vector< S >; } } +''', + "types.flatdata": ''' +namespace n{ struct S { f : u8 : 8; } } +''' + }) + engine = Engine.from_file(str(tmp_path / "main.flatdata")) + output = engine.render("dot") + # Archive rendered + assert "cluster__n_A" in output + # Imported struct rendered within the archive's resource + assert "_n_A_r_n_S" in output + + def test_cpp_separate_compilation_with_imports(self, 
tmp_path): + """C++ generator emits only local types and #include directives.""" + _write_files(str(tmp_path), { + "main.flatdata": ''' +import "types.flatdata"; +namespace n{ + struct Local { x : u8 : 8; } + archive A { r : vector< S >; r2 : vector< Local >; } +} +''', + "types.flatdata": ''' +namespace n{ struct S { f : u8 : 8; } } +''' + }) + engine = Engine.from_file(str(tmp_path / "main.flatdata")) + output = engine.render("cpp") + # Include directive for imported file + assert '#include "types.h"' in output + # Local struct definition IS emitted + assert "LocalTemplate" in output + # Imported struct S is NOT emitted as a C++ struct definition + assert "STemplate" not in output + + def test_cpp_include_path_mapping(self, tmp_path): + """C++ import paths map .flatdata to .h correctly.""" + _write_files(str(tmp_path), { + "main.flatdata": ''' +import "sub/types.flatdata"; +namespace n{ archive A { r : vector< S >; } } +''', + "sub/types.flatdata": ''' +namespace n{ struct S { f : u8 : 8; } } +''' + }) + engine = Engine.from_file(str(tmp_path / "main.flatdata")) + output = engine.render("cpp") + assert '#include "sub/types.h"' in output + + def test_cpp_no_imports_unchanged(self): + """C++ generator without imports produces normal output (no empty include block).""" + engine = Engine(''' +namespace n{ + struct S { f : u8 : 8; } + archive A { r : vector< S >; } +} +''') + output = engine.render("cpp") + assert "struct S" in output or "SType" in output + # No user includes (only system includes) + assert '#include "' not in output + + def test_rust_separate_compilation_same_namespace(self, tmp_path): + """Rust generator emits only local types with pub use re-exports.""" + _write_files(str(tmp_path), { + "main.flatdata": ''' +import "types.flatdata"; +namespace n{ + struct Local { x : u8 : 8; } + archive A { r : vector< S >; r2 : vector< Local >; } +} +''', + "types.flatdata": ''' +namespace n{ struct S { f : u8 : 8; } } +''' + }) + engine = 
Engine.from_file(str(tmp_path / "main.flatdata")) + output = engine.render("rust") + # Local struct IS emitted + assert "pub struct Local" in output + # Imported struct S is NOT emitted as a definition + assert "pub struct S " not in output + # Re-export directive brings imported types into scope + assert "pub use super::super::types::n::*;" in output + # Schema embedding is self-contained (includes imported S) + assert "struct S" in output # appears in schema strings + + def test_rust_separate_compilation_cross_namespace(self, tmp_path): + """Rust generates namespace shims with re-exports for imported namespaces.""" + _write_files(str(tmp_path), { + "main.flatdata": ''' +import "types.flatdata"; +namespace app{ archive A { r : vector< .common.S >; } } +''', + "types.flatdata": ''' +namespace common{ struct S { f : u8 : 8; } } +''' + }) + engine = Engine.from_file(str(tmp_path / "main.flatdata")) + output = engine.render("rust") + # Imported-only namespace is still emitted as a module shim + assert "pub mod common" in output + assert "pub use super::super::types::common::*;" in output + # Local namespace has the archive + assert "pub mod app" in output + assert "struct A" in output + + def test_rust_subdirectory_import(self, tmp_path): + """Rust re-export paths handle subdirectory imports correctly.""" + _write_files(str(tmp_path), { + "main.flatdata": ''' +import "sub/types.flatdata"; +namespace n{ archive A { r : vector< S >; } } +''', + "sub/types.flatdata": ''' +namespace n{ struct S { f : u8 : 8; } } +''' + }) + engine = Engine.from_file(str(tmp_path / "main.flatdata")) + output = engine.render("rust") + assert "pub use super::super::sub::types::n::*;" in output + + def test_rust_no_imports_unchanged(self): + """Rust generator without imports produces normal output.""" + engine = Engine(''' +namespace n{ + struct S { f : u8 : 8; } + archive A { r : vector< S >; } +} +''') + output = engine.render("rust") + assert "pub struct S" in output + assert "pub use 
super::" not in output + + def test_rust_transitive_import_reexports(self, tmp_path): + """Rust re-exports work for transitively imported types.""" + _write_files(str(tmp_path), { + "main.flatdata": ''' +import "mid.flatdata"; +namespace n{ archive A { r : vector< .lib.S >; } } +''', + "mid.flatdata": ''' +import "lib.flatdata"; +namespace n{ struct Mid { m : u8 : 8; } } +''', + "lib.flatdata": ''' +namespace lib{ struct S { f : u8 : 8; } } +''' + }) + engine = Engine.from_file(str(tmp_path / "main.flatdata")) + output = engine.render("rust") + # Transitive import gets a re-export shim + assert "pub mod lib" in output + assert "pub use super::super::lib::lib::*;" in output + # Direct import also re-exported + assert "pub use super::super::mid::n::*;" in output + + def test_rust_parent_directory_import(self, tmp_path): + """Rust re-exports use multiple super:: for parent directory imports.""" + _write_files(str(tmp_path), { + "sub/main.flatdata": ''' +import "../shared.flatdata"; +namespace n{ archive A { r : vector< S >; } } +''', + "shared.flatdata": ''' +namespace n{ struct S { f : u8 : 8; } } +''' + }) + engine = Engine.from_file(str(tmp_path / "sub" / "main.flatdata")) + output = engine.render("rust") + # "../shared" needs two super:: (one sibling + one for "..") + assert "pub use super::super::super::shared::n::*;" in output + + +class TestEngineBackwardCompat: + """Verify Engine(schema_string) still works unchanged.""" + + def test_engine_string_constructor(self): + engine = Engine(''' +namespace n{ + struct S { f : u8 : 8; } + archive A { r : vector< S >; } +} +''') + assert engine.tree is not None + structs = list(engine.tree.root.iterate(Structure)) + assert any(s.name == "S" for s in structs) + + def test_engine_string_render(self): + engine = Engine('namespace n{ struct S { f : u8 : 8; } }') + output = engine.render("flatdata") + assert "struct S" in output + + def test_engine_string_no_imports(self): + engine = Engine('namespace n{ struct S { f : u8 : 
8; } }') + assert len(engine.tree.imports) == 0 + + def test_engine_string_with_imports_raises(self): + """Import statements in string-based Engine should raise an error.""" + from flatdata.generator.tree.errors import UnresolvedImportError + with pytest.raises(UnresolvedImportError): + Engine('import "foo.flatdata"; namespace n{ struct S { f : u8 : 8; } }') diff --git a/flatdata-generator/tests/tree/test_importer.py b/flatdata-generator/tests/tree/test_importer.py new file mode 100644 index 00000000..7382b76e --- /dev/null +++ b/flatdata-generator/tests/tree/test_importer.py @@ -0,0 +1,362 @@ +''' + Copyright (c) 2025 HERE Europe B.V. + See the LICENSE file in the root of this project for license details. +''' + +import os +import tempfile + +import pytest + +from flatdata.generator.tree.importer import ( + resolve_imports, ImportInfo +) +from flatdata.generator.tree.errors import ImportFileNotFoundError, ImportParsingError +from flatdata.generator.grammar import flatdata_grammar + + +class TestGrammarImport: + """Test that the grammar correctly parses import statements.""" + + def test_single_import(self): + schema = 'import "bar.flatdata";\nnamespace foo { struct A { x : u32 : 32; } }' + parsed = flatdata_grammar.parse_string(schema, parse_all=True).flatdata + assert len(parsed.imports) == 1 + assert parsed.imports[0].path == "bar.flatdata" + + def test_multiple_imports(self): + schema = ( + 'import "bar.flatdata";\n' + 'import "baz.flatdata";\n' + 'namespace foo { struct A { x : u32 : 32; } }' + ) + parsed = flatdata_grammar.parse_string(schema, parse_all=True).flatdata + assert len(parsed.imports) == 2 + assert parsed.imports[0].path == "bar.flatdata" + assert parsed.imports[1].path == "baz.flatdata" + + def test_import_with_path(self): + schema = 'import "sub/dir/types.flatdata";\nnamespace foo { struct A { x : u32 : 32; } }' + parsed = flatdata_grammar.parse_string(schema, parse_all=True).flatdata + assert len(parsed.imports) == 1 + assert 
parsed.imports[0].path == "sub/dir/types.flatdata" + + def test_no_imports(self): + schema = 'namespace foo { struct A { x : u32 : 32; } }' + parsed = flatdata_grammar.parse_string(schema, parse_all=True).flatdata + assert len(parsed.imports) == 0 + + def test_import_only_file(self): + """A file with only imports and no namespaces should parse.""" + schema = 'import "bar.flatdata";' + parsed = flatdata_grammar.parse_string(schema, parse_all=True).flatdata + assert len(parsed.imports) == 1 + assert len(parsed.namespace) == 0 + + def test_import_with_comment_before(self): + schema = ( + '/* header comment */\n' + 'import "bar.flatdata";\n' + 'namespace foo { struct A { x : u32 : 32; } }' + ) + parsed = flatdata_grammar.parse_string(schema, parse_all=True).flatdata + assert len(parsed.imports) == 1 + + def test_comment_attached_to_import(self): + """A comment directly before an import should be captured as its doc (except the first, which may be consumed by free_comments).""" + schema = ( + 'import "a.flatdata";\n' + '/** docs for b */\n' + 'import "b.flatdata";\n' + 'namespace foo { struct A { x : u32 : 32; } }' + ) + parsed = flatdata_grammar.parse_string(schema, parse_all=True).flatdata + assert len(parsed.imports) == 2 + assert parsed.imports[0].path == "a.flatdata" + assert parsed.imports[1].path == "b.flatdata" + assert "docs for b" in parsed.imports[1].doc + + def test_comment_between_imports_and_namespace(self): + """A comment after the last import (before namespace) should attach to the namespace, not break parsing.""" + schema = ( + 'import "a.flatdata";\n' + '/* comment */\n' + 'namespace foo { struct A { x : u32 : 32; } }' + ) + parsed = flatdata_grammar.parse_string(schema, parse_all=True).flatdata + assert len(parsed.imports) == 1 + + def test_empty_schema(self): + """An empty schema should parse (zero imports, zero namespaces).""" + schema = '' + parsed = flatdata_grammar.parse_string(schema, parse_all=True).flatdata + assert len(parsed.imports) == 0 + 
assert len(parsed.namespace) == 0 + + def test_import_after_namespace(self): + """Imports may appear after namespaces (relaxed ordering).""" + schema = ( + 'namespace foo { struct A { x : u32 : 32; } }\n' + 'import "bar.flatdata";' + ) + parsed = flatdata_grammar.parse_string(schema, parse_all=True).flatdata + assert len(parsed.namespace) == 1 + assert len(parsed.imports) == 1 + assert parsed.imports[0].path == "bar.flatdata" + + +def _write_temp_files(tmpdir: str, files: dict[str, str]) -> str: + """Write files to tmpdir, return path to first file.""" + first_path = None + for name, content in files.items(): + path = os.path.join(tmpdir, name) + os.makedirs(os.path.dirname(path), exist_ok=True) + with open(path, 'w') as f: + f.write(content) + if first_path is None: + first_path = path + assert first_path is not None + return first_path + + +class TestResolveImports: + """Test the recursive import resolver.""" + + def test_no_imports(self): + with tempfile.TemporaryDirectory() as tmpdir: + root = _write_temp_files(tmpdir, { + "main.flatdata": 'namespace foo { struct A { x : u32 : 32; } }' + }) + files, root_imports = resolve_imports(root) + assert len(files) == 1 + assert files[0].abs_path == os.path.realpath(root) + assert root_imports == [] + + def test_simple_import(self): + with tempfile.TemporaryDirectory() as tmpdir: + root = _write_temp_files(tmpdir, { + "main.flatdata": ( + 'import "bar.flatdata";\n' + 'namespace foo { struct A { x : u32 : 32; } }' + ), + "bar.flatdata": 'namespace bar { struct B { y : u32 : 32; } }' + }) + files, root_imports = resolve_imports(root) + assert len(files) == 2 + # Dependency-first order: bar before main + assert files[0].abs_path == os.path.realpath( + os.path.join(tmpdir, "bar.flatdata")) + assert files[1].abs_path == os.path.realpath(root) + assert len(root_imports) == 1 + assert root_imports[0].path == "bar.flatdata" + + def test_diamond_import(self): + """A→B, A→C, B→D, C→D: D should appear only once.""" + with 
tempfile.TemporaryDirectory() as tmpdir: + root = _write_temp_files(tmpdir, { + "a.flatdata": ( + 'import "b.flatdata";\n' + 'import "c.flatdata";\n' + 'namespace a { struct A { x : u32 : 32; } }' + ), + "b.flatdata": ( + 'import "d.flatdata";\n' + 'namespace b { struct B { x : u32 : 32; } }' + ), + "c.flatdata": ( + 'import "d.flatdata";\n' + 'namespace c { struct C { x : u32 : 32; } }' + ), + "d.flatdata": 'namespace d { struct D { x : u32 : 32; } }' + }) + files, root_imports = resolve_imports( + os.path.join(tmpdir, "a.flatdata")) + # Each file appears exactly once + paths = [f.abs_path for f in files] + assert len(paths) == len(set(paths)) + assert len(files) == 4 + # D should come before B and C (dependency-first) + d_idx = next(i for i, f in enumerate(files) + if f.abs_path.endswith("d.flatdata")) + b_idx = next(i for i, f in enumerate(files) + if f.abs_path.endswith("b.flatdata")) + c_idx = next(i for i, f in enumerate(files) + if f.abs_path.endswith("c.flatdata")) + assert d_idx < b_idx + assert d_idx < c_idx + + def test_cyclic_import(self): + """A→B, B→A: both files should be included, no infinite loop.""" + with tempfile.TemporaryDirectory() as tmpdir: + root = _write_temp_files(tmpdir, { + "a.flatdata": ( + 'import "b.flatdata";\n' + 'namespace a { struct A { x : u32 : 32; } }' + ), + "b.flatdata": ( + 'import "a.flatdata";\n' + 'namespace b { struct B { x : u32 : 32; } }' + ) + }) + files, root_imports = resolve_imports( + os.path.join(tmpdir, "a.flatdata")) + assert len(files) == 2 + paths = {f.abs_path for f in files} + assert any(p.endswith("a.flatdata") for p in paths) + assert any(p.endswith("b.flatdata") for p in paths) + + def test_missing_import_file(self): + with tempfile.TemporaryDirectory() as tmpdir: + root = _write_temp_files(tmpdir, { + "main.flatdata": 'import "nonexistent.flatdata";\nnamespace foo { }' + }) + with pytest.raises(ImportFileNotFoundError, + match="nonexistent.flatdata"): + resolve_imports(root) + + def 
test_nested_path_import(self): + """Import from a subdirectory.""" + with tempfile.TemporaryDirectory() as tmpdir: + root = _write_temp_files(tmpdir, { + "main.flatdata": ( + 'import "sub/types.flatdata";\n' + 'namespace foo { struct A { x : u32 : 32; } }' + ), + "sub/types.flatdata": 'namespace types { struct T { x : u32 : 32; } }' + }) + files, root_imports = resolve_imports( + os.path.join(tmpdir, "main.flatdata")) + assert len(files) == 2 + assert root_imports[0].path == "sub/types.flatdata" + + @pytest.mark.skipif(not hasattr(os, 'symlink'), reason="symlinks not supported") + def test_symlink_dedup(self): + """Two imports of the same file via different paths (symlink) should dedup.""" + with tempfile.TemporaryDirectory() as tmpdir: + _write_temp_files(tmpdir, { + "real.flatdata": 'namespace r { struct R { x : u32 : 32; } }', + "main.flatdata": ( + 'import "real.flatdata";\n' + 'import "link.flatdata";\n' + 'namespace m { struct M { x : u32 : 32; } }' + ) + }) + # Create a symlink + link_path = os.path.join(tmpdir, "link.flatdata") + real_path = os.path.join(tmpdir, "real.flatdata") + os.symlink(real_path, link_path) + + files, _ = resolve_imports(os.path.join(tmpdir, "main.flatdata")) + # real.flatdata and link.flatdata resolve to the same canonical path + assert len(files) == 2 # main + real (deduplicated) + + def test_relative_path_dedup(self): + """Import via ./foo.flatdata and foo.flatdata should dedup.""" + with tempfile.TemporaryDirectory() as tmpdir: + root = _write_temp_files(tmpdir, { + "main.flatdata": ( + 'import "bar.flatdata";\n' + 'import "./bar.flatdata";\n' + 'namespace m { struct M { x : u32 : 32; } }' + ), + "bar.flatdata": 'namespace b { struct B { x : u32 : 32; } }' + }) + files, _ = resolve_imports(root) + assert len(files) == 2 # main + bar (deduplicated) + + def test_content_preserved(self): + """The returned content should preserve the original file content including import lines.""" + with tempfile.TemporaryDirectory() as tmpdir: + 
root = _write_temp_files(tmpdir, { + "main.flatdata": ( + 'import "bar.flatdata";\n' + 'namespace foo { struct A { x : u32 : 32; } }' + ), + "bar.flatdata": 'namespace bar { struct B { y : u32 : 32; } }' + }) + files, _ = resolve_imports(root) + main_file = next(f for f in files if f.abs_path.endswith("main.flatdata")) + assert 'import "bar.flatdata"' in main_file.content + assert 'namespace foo' in main_file.content + + def test_transitive_import(self): + """A→B→C: all three files should be included.""" + with tempfile.TemporaryDirectory() as tmpdir: + root = _write_temp_files(tmpdir, { + "a.flatdata": ( + 'import "b.flatdata";\n' + 'namespace a { struct A { x : u32 : 32; } }' + ), + "b.flatdata": ( + 'import "c.flatdata";\n' + 'namespace b { struct B { x : u32 : 32; } }' + ), + "c.flatdata": 'namespace c { struct C { x : u32 : 32; } }' + }) + files, root_imports = resolve_imports( + os.path.join(tmpdir, "a.flatdata")) + assert len(files) == 3 + # C before B before A + paths = [os.path.basename(f.abs_path) for f in files] + assert paths.index("c.flatdata") < paths.index("b.flatdata") + assert paths.index("b.flatdata") < paths.index("a.flatdata") + # Only direct imports of A returned + assert len(root_imports) == 1 + assert root_imports[0].path == "b.flatdata" + + def test_parsed_result_cached(self): + """Each ResolvedFile should carry its cached parse result.""" + with tempfile.TemporaryDirectory() as tmpdir: + root = _write_temp_files(tmpdir, { + "main.flatdata": ( + 'import "bar.flatdata";\n' + 'namespace foo { struct A { x : u32 : 32; } }' + ), + "bar.flatdata": 'namespace bar { struct B { y : u32 : 32; } }' + }) + files, _ = resolve_imports(root) + for f in files: + assert f.parsed is not None + assert "namespace" in f.parsed + main = next(f for f in files if f.abs_path.endswith("main.flatdata")) + assert "imports" in main.parsed + assert main.parsed["imports"][0]["path"] == "bar.flatdata" + + def test_parse_error_in_imported_file(self): + """A syntax error in 
an imported file should report the file path.""" + with tempfile.TemporaryDirectory() as tmpdir: + root = _write_temp_files(tmpdir, { + "main.flatdata": ( + 'import "bad.flatdata";\n' + 'namespace foo { struct A { x : u32 : 32; } }' + ), + "bad.flatdata": 'this is not valid flatdata syntax' + }) + with pytest.raises(ImportParsingError, match="bad.flatdata"): + resolve_imports(root) + + def test_parse_syntax_error_in_imported_file(self): + """A ParseSyntaxException (error-stop) in an imported file should be caught.""" + with tempfile.TemporaryDirectory() as tmpdir: + root = _write_temp_files(tmpdir, { + "main.flatdata": ( + 'import "bad.flatdata";\n' + 'namespace foo { struct A { x : u32 : 32; } }' + ), + "bad.flatdata": 'namespace bar { struct { } }' + }) + with pytest.raises(ImportParsingError, match="bad.flatdata"): + resolve_imports(root) + + def test_import_of_empty_file(self): + """An imported empty file should parse successfully.""" + with tempfile.TemporaryDirectory() as tmpdir: + root = _write_temp_files(tmpdir, { + "main.flatdata": ( + 'import "empty.flatdata";\n' + 'namespace foo { struct A { x : u32 : 32; } }' + ), + "empty.flatdata": '' + }) + files, _ = resolve_imports(root) + assert len(files) == 2 diff --git a/flatdata-generator/tests/tree/test_multi_file_builder.py b/flatdata-generator/tests/tree/test_multi_file_builder.py new file mode 100644 index 00000000..3ae02caa --- /dev/null +++ b/flatdata-generator/tests/tree/test_multi_file_builder.py @@ -0,0 +1,469 @@ +''' + Copyright (c) 2025 HERE Europe B.V. + See the LICENSE file in the root of this project for license details. 
+''' + +import os +import tempfile + +import pytest + +from flatdata.generator.tree.builder import build_ast_from_file +from flatdata.generator.tree.errors import ( + ImportFileNotFoundError, ImportParsingError, ParsingError, SymbolRedefinition) +from flatdata.generator.tree.nodes.trivial import Structure, Constant, Enumeration +from flatdata.generator.tree.nodes.archive import Archive + + +def _write_files(tmpdir, files): + """Write a dict of {relative_path: content} into tmpdir, return root path.""" + for rel_path, content in files.items(): + full = os.path.join(tmpdir, rel_path) + os.makedirs(os.path.dirname(full), exist_ok=True) + with open(full, "w") as f: + f.write(content) + return tmpdir + + +class TestBuildAstFromFile: + """Tests for multi-file AST building via build_ast_from_file.""" + + def test_single_file_no_imports(self, tmp_path): + _write_files(str(tmp_path), { + "main.flatdata": 'namespace n{ struct S { f : u8 : 8; } }' + }) + tree = build_ast_from_file(str(tmp_path / "main.flatdata")) + assert len(tree.imports) == 0 + structs = list(tree.root.iterate(Structure)) + assert any(s.name == "S" for s in structs) + assert all(s.is_local for s in structs) + + def test_simple_import(self, tmp_path): + _write_files(str(tmp_path), { + "main.flatdata": ''' +import "types.flatdata"; +namespace n{ + archive A { r : vector< S >; } +} +''', + "types.flatdata": ''' +namespace n{ struct S { f : u8 : 8; } } +''' + }) + tree = build_ast_from_file(str(tmp_path / "main.flatdata")) + types_file = os.path.realpath(str(tmp_path / "types.flatdata")) + root_file = os.path.realpath(str(tmp_path / "main.flatdata")) + + # imports contain direct imports of root + assert len(tree.imports) == 1 + assert tree.imports[0].path == "types.flatdata" + + # Nodes from both files are in the tree + structs = list(tree.root.iterate(Structure)) + assert any(s.name == "S" for s in structs) + archives = list(tree.root.iterate(Archive)) + assert any(a.name == "A" for a in archives) + + # Source 
file tagging + s_node = next(s for s in structs if s.name == "S") + a_node = next(a for a in archives if a.name == "A") + assert s_node.source_file == types_file + assert a_node.source_file == root_file + + # is_local + assert a_node.is_local + assert not s_node.is_local + assert tree.is_local_node(a_node) + assert not tree.is_local_node(s_node) + + def test_diamond_import(self, tmp_path): + _write_files(str(tmp_path), { + "main.flatdata": ''' +import "a.flatdata"; +import "b.flatdata"; +namespace n{ archive Main { r : vector< S >; } } +''', + "a.flatdata": ''' +import "common.flatdata"; +namespace n{ struct A { f : u8 : 8; } } +''', + "b.flatdata": ''' +import "common.flatdata"; +namespace n{ struct B { f : u8 : 8; } } +''', + "common.flatdata": ''' +namespace n{ struct S { f : u8 : 8; } } +''' + }) + tree = build_ast_from_file(str(tmp_path / "main.flatdata")) + + # All types present, no SymbolRedefinition + structs = list(tree.root.iterate(Structure)) + names = {s.name for s in structs} + assert "S" in names + assert "A" in names + assert "B" in names + + # Only direct imports of root + import_paths = {i.path for i in tree.imports} + assert import_paths == {"a.flatdata", "b.flatdata"} + + def test_cyclic_import(self, tmp_path): + _write_files(str(tmp_path), { + "parent.flatdata": ''' +import "child.flatdata"; +namespace n{ + struct ParentData { f : u8 : 8; } + archive Parent { r : vector< ChildData >; } +} +''', + "child.flatdata": ''' +import "parent.flatdata"; +namespace n{ + struct ChildData { f : u8 : 8; } + archive Child { r : vector< ParentData >; } +} +''' + }) + tree = build_ast_from_file(str(tmp_path / "parent.flatdata")) + + structs = list(tree.root.iterate(Structure)) + names = {s.name for s in structs} + assert "ParentData" in names + assert "ChildData" in names + + archives = list(tree.root.iterate(Archive)) + archive_names = {a.name for a in archives} + assert "Parent" in archive_names + assert "Child" in archive_names + + def 
test_cross_file_reference_resolution(self, tmp_path): + """Types from imported file can be referenced by root file.""" + _write_files(str(tmp_path), { + "main.flatdata": ''' +import "types.flatdata"; +namespace n{ + archive A { data : vector< Point >; } +} +''', + "types.flatdata": ''' +namespace n{ struct Point { x : u32 : 32; y : u32 : 32; } } +''' + }) + tree = build_ast_from_file(str(tmp_path / "main.flatdata")) + + archives = list(tree.root.iterate(Archive)) + assert len(archives) == 1 + assert archives[0].name == "A" + + def test_cross_file_enum_reference(self, tmp_path): + """Enum from imported file can be used as field type.""" + _write_files(str(tmp_path), { + "main.flatdata": ''' +import "enums.flatdata"; +namespace n{ + struct Obj { kind : .n.Kind : 8; } +} +''', + "enums.flatdata": ''' +namespace n{ + enum Kind : u8 { A = 0, B = 1 } +} +''' + }) + tree = build_ast_from_file(str(tmp_path / "main.flatdata")) + structs = list(tree.root.iterate(Structure)) + assert any(s.name == "Obj" for s in structs) + + def test_transitive_import(self, tmp_path): + """Transitive imports: main→a→b, main can use types from b.""" + _write_files(str(tmp_path), { + "main.flatdata": ''' +import "a.flatdata"; +namespace n{ archive A { r : vector< Deep >; } } +''', + "a.flatdata": ''' +import "b.flatdata"; +namespace n{ struct Mid { f : u8 : 8; } } +''', + "b.flatdata": ''' +namespace n{ struct Deep { f : u8 : 8; } } +''' + }) + tree = build_ast_from_file(str(tmp_path / "main.flatdata")) + structs = list(tree.root.iterate(Structure)) + names = {s.name for s in structs} + assert "Mid" in names + assert "Deep" in names + + def test_nested_path_import(self, tmp_path): + """Import from a subdirectory.""" + _write_files(str(tmp_path), { + "main.flatdata": ''' +import "sub/types.flatdata"; +namespace n{ archive A { r : vector< S >; } } +''', + "sub/types.flatdata": ''' +namespace n{ struct S { f : u8 : 8; } } +''' + }) + tree = build_ast_from_file(str(tmp_path / "main.flatdata")) + 
structs = list(tree.root.iterate(Structure)) + assert any(s.name == "S" for s in structs) + + def test_missing_import_raises_error(self, tmp_path): + _write_files(str(tmp_path), { + "main.flatdata": 'import "missing.flatdata"; namespace n{ struct S { f : u8 : 8; } }' + }) + with pytest.raises(ImportFileNotFoundError): + build_ast_from_file(str(tmp_path / "main.flatdata")) + + def test_import_parse_error_raises(self, tmp_path): + _write_files(str(tmp_path), { + "main.flatdata": 'import "bad.flatdata"; namespace n{ struct S { f : u8 : 8; } }', + "bad.flatdata": 'this is not valid flatdata' + }) + with pytest.raises(ImportParsingError): + build_ast_from_file(str(tmp_path / "main.flatdata")) + + def test_root_file_parse_error_raises_parsing_error(self, tmp_path): + """Root file with invalid syntax raises ParsingError, not ImportParsingError.""" + _write_files(str(tmp_path), { + "main.flatdata": 'this is not valid flatdata' + }) + with pytest.raises(ParsingError): + build_ast_from_file(str(tmp_path / "main.flatdata")) + + def test_symbol_redefinition_across_files(self, tmp_path): + """Same struct name in same namespace across different files → error.""" + _write_files(str(tmp_path), { + "main.flatdata": ''' +import "other.flatdata"; +namespace n{ struct S { f : u8 : 8; } } +''', + "other.flatdata": ''' +namespace n{ struct S { f : u8 : 8; } } +''' + }) + with pytest.raises(SymbolRedefinition): + build_ast_from_file(str(tmp_path / "main.flatdata")) + + +class TestSourceFileTagging: + """Tests for source_file propagation on AST nodes.""" + + def test_all_toplevel_types_tagged(self, tmp_path): + _write_files(str(tmp_path), { + "main.flatdata": ''' +namespace n{ + const u8 C = 42; + enum E : u8 { A = 0 } + struct S { f : u8 : 8; } + archive A { r : vector< S >; } +} +''' + }) + tree = build_ast_from_file(str(tmp_path / "main.flatdata")) + root_file = os.path.realpath(str(tmp_path / "main.flatdata")) + + for node_type in [Structure, Constant, Enumeration, Archive]: + for 
node in tree.root.iterate(node_type): + if node.name.startswith("_"): + continue # skip builtins + assert node.source_file == root_file, \ + f"{node_type.__name__} '{node.name}' not tagged with source file" + + def test_imported_nodes_tagged_with_import_file(self, tmp_path): + _write_files(str(tmp_path), { + "main.flatdata": ''' +import "lib.flatdata"; +namespace n{ archive A { r : vector< S >; } } +''', + "lib.flatdata": ''' +namespace n{ struct S { f : u8 : 8; } } +''' + }) + tree = build_ast_from_file(str(tmp_path / "main.flatdata")) + lib_file = os.path.realpath(str(tmp_path / "lib.flatdata")) + + s_node = next(s for s in tree.root.iterate(Structure) if s.name == "S") + assert s_node.source_file == lib_file + + def test_builtin_structures_tagged(self, tmp_path): + """Builtin structures created for multivectors inherit source_file.""" + _write_files(str(tmp_path), { + "main.flatdata": ''' +namespace n{ + struct A { f : u8 : 8; } + struct B { f : u8 : 8; } + archive Ar { mv : multivector< 33, A, B >; } +} +''' + }) + tree = build_ast_from_file(str(tmp_path / "main.flatdata")) + root_file = os.path.realpath(str(tmp_path / "main.flatdata")) + + # Find builtin structures (in _builtin namespace) + all_structs = list(tree.root.iterate(Structure)) + builtin_structs = [s for s in all_structs if "_builtin" in s.path] + assert len(builtin_structs) > 0, "Expected builtin structures for multivector" + + for bs in builtin_structs: + assert bs.source_file == root_file, \ + f"Builtin struct '{bs.name}' not tagged with source file" + + +class TestIsLocalNode: + """Tests for SyntaxTree.is_local_node().""" + + def test_no_file_tracking_all_local(self, tmp_path): + """When built from string (no file tracking), all nodes are local.""" + from flatdata.generator.tree.builder import build_ast + tree = build_ast('namespace n{ struct S { f : u8 : 8; } }') + structs = list(tree.root.iterate(Structure)) + for s in structs: + assert tree.is_local_node(s) + + def 
test_local_vs_imported(self, tmp_path): + _write_files(str(tmp_path), { + "main.flatdata": ''' +import "lib.flatdata"; +namespace n{ + struct Local { f : u8 : 8; } + archive A { r : vector< Imported >; } +} +''', + "lib.flatdata": ''' +namespace n{ struct Imported { f : u8 : 8; } } +''' + }) + tree = build_ast_from_file(str(tmp_path / "main.flatdata")) + + local = next(s for s in tree.root.iterate(Structure) if s.name == "Local") + imported = next(s for s in tree.root.iterate(Structure) if s.name == "Imported") + + assert tree.is_local_node(local) + assert not tree.is_local_node(imported) + + def test_child_nodes_inherit_locality(self, tmp_path): + """Fields and resources inherit is_local_node from parent.""" + from flatdata.generator.tree.nodes.trivial import Field + from flatdata.generator.tree.nodes.resources import Vector + + _write_files(str(tmp_path), { + "main.flatdata": ''' +import "lib.flatdata"; +namespace n{ + struct Local { f : u8 : 8; } + archive A { r : vector< Local >; } +} +''', + "lib.flatdata": ''' +namespace n{ struct Imported { g : u8 : 8; } } +''' + }) + tree = build_ast_from_file(str(tmp_path / "main.flatdata")) + + # Field of local struct + local_struct = next(s for s in tree.root.iterate(Structure) if s.name == "Local") + local_field = next(local_struct.iterate(Field)) + assert tree.is_local_node(local_field) + + # Field of imported struct + imported_struct = next(s for s in tree.root.iterate(Structure) if s.name == "Imported") + imported_field = next(imported_struct.iterate(Field)) + assert not tree.is_local_node(imported_field) + + +class TestBackwardCompatibility: + """Verify build_ast() still works unchanged.""" + + def test_build_ast_string_unchanged(self): + from flatdata.generator.tree.builder import build_ast + tree = build_ast(''' +namespace n{ + struct S { f : u8 : 8; } + archive A { r : vector< S >; } +} +''') + assert len(tree.imports) == 0 + structs = list(tree.root.iterate(Structure)) + assert any(s.name == "S" for s in 
structs) + # All nodes default to is_local=True + assert all(s.is_local for s in structs) + + def test_build_ast_empty_string(self): + from flatdata.generator.tree.builder import build_ast + tree = build_ast("") + assert len(tree.imports) == 0 + + +class TestReferenceNodeTagging: + """Tests for pipeline-created reference nodes being tagged.""" + + def test_builtin_structure_references_tagged(self, tmp_path): + from flatdata.generator.tree.nodes.references import BuiltinStructureReference + + _write_files(str(tmp_path), { + "main.flatdata": ''' +namespace n{ + struct A { f : u8 : 8; } + archive Ar { mv : multivector< 33, A >; } +} +''' + }) + tree = build_ast_from_file(str(tmp_path / "main.flatdata")) + root_file = os.path.realpath(str(tmp_path / "main.flatdata")) + + refs = list(tree.root.iterate(BuiltinStructureReference)) + assert len(refs) > 0 + for ref in refs: + assert ref.source_file == root_file + assert ref.is_local + + def test_constant_value_references_tagged(self, tmp_path): + from flatdata.generator.tree.nodes.references import ConstantValueReference + + _write_files(str(tmp_path), { + "main.flatdata": ''' +namespace n{ + const u8 C = 42; + struct S { f : u8 : 8; } + archive A { r : vector< S >; } +} +''' + }) + tree = build_ast_from_file(str(tmp_path / "main.flatdata")) + root_file = os.path.realpath(str(tmp_path / "main.flatdata")) + + refs = list(tree.root.iterate(ConstantValueReference)) + assert len(refs) > 0 + for ref in refs: + assert ref.source_file == root_file + assert ref.is_local + + +class TestMultipleNamespacesAcrossFiles: + """Tests for files defining different namespaces.""" + + def test_different_namespaces_across_files(self, tmp_path): + _write_files(str(tmp_path), { + "main.flatdata": ''' +import "other.flatdata"; +namespace a{ struct S { f : u8 : 8; } } +''', + "other.flatdata": ''' +namespace b{ struct T { g : u8 : 8; } } +''' + }) + tree = build_ast_from_file(str(tmp_path / "main.flatdata")) + + structs = 
list(tree.root.iterate(Structure)) + names = {s.name for s in structs} + assert "S" in names + assert "T" in names + + s_node = next(s for s in structs if s.name == "S") + t_node = next(s for s in structs if s.name == "T") + assert s_node.is_local + assert not t_node.is_local diff --git a/flatdata-py/flatdata/lib/flatdata_writer.py b/flatdata-py/flatdata/lib/flatdata_writer.py index c41128a8..8a61721c 100644 --- a/flatdata-py/flatdata/lib/flatdata_writer.py +++ b/flatdata-py/flatdata/lib/flatdata_writer.py @@ -8,6 +8,7 @@ from typing import Any, TYPE_CHECKING from flatdata.generator.engine import Engine +from flatdata.generator.tree.nodes.archive import Archive from flatdata.generator.tree.errors import FlatdataSyntaxError from .resource_storage import ResourceStorage @@ -28,23 +29,46 @@ class Writer: def __init__(self, archive_schema: str, path: str, archive_name: str = "") -> None: ''' - Creates instance or Writer class. Archive module is rendered by engine - using provided schema. + Creates instance of Writer class from a schema string. + The schema must be self-contained (no import statements). :param archive_schema(str): flatdata schema :param path(str): file path where flatdata files are created + :param archive_name(str): name of the archive (inferred if empty) ''' try: - if not archive_name: - archive_name = Writer._get_archive_name( - archive_schema) - _, archive_type = Engine(archive_schema).render_python_module( - archive_name=archive_name + "Builder") + engine = Engine(archive_schema) + self._init_from_engine(engine, path, archive_name) except FlatdataSyntaxError as err: raise RuntimeError( - "Error in generating modules from provided schema: %s " % err) + "Error in generating modules from provided schema: %s " % err) from err - self.builder: ArchiveBuilder = archive_type( + @classmethod + def from_file(cls, schema_path: str, path: str, archive_name: str = "") -> 'Writer': + ''' + Creates instance of Writer class from a schema file, resolving imports. 
+ + :param schema_path(str): path to the flatdata schema file + :param path(str): file path where flatdata files are created + :param archive_name(str): name of the archive (inferred if empty) + ''' + writer = cls.__new__(cls) + try: + engine = Engine.from_file(schema_path) + writer._init_from_engine(engine, path, archive_name) + except FlatdataSyntaxError as err: + raise RuntimeError( + "Error in generating modules from provided schema: %s " % err) from err + return writer + + def _init_from_engine(self, engine: Engine, path: str, archive_name: str) -> None: + '''Shared initialization from an Engine instance.''' + if not archive_name: + archive_name = Writer._find_archive_name(engine) + module, archive_type = engine.render_python_module( + archive_name=archive_name) + builder_type = getattr(module, archive_type.__name__ + "Builder") + self.builder: ArchiveBuilder = builder_type( ResourceStorage(FileResourceWriter(), path)) def set(self, resource_name: str, resource_data: Any) -> None: @@ -61,19 +85,22 @@ def finish(self) -> None: '''Completes flatdata creation''' self.builder.finish() - @classmethod - def _get_archive_name(cls, archive_schema: str) -> str: + @staticmethod + def _find_archive_name(engine: Engine) -> str: ''' - Returns name of archive from flatdata schema. + Finds the archive name from the AST, preferring local archives. 
- :param archive_schema(str): flatdata schema in str + :raises RuntimeError: if no archive or multiple ambiguous archives found ''' - if not archive_schema: - raise RuntimeError("Archive schema is required") + all_archives = list(engine.tree.root.iterate(Archive)) + local_archives = [a for a in all_archives if a.is_local] + + # Prefer local archives when imports are present + candidates = local_archives if local_archives else all_archives - archive_keyword = "archive" - index = archive_schema.find(archive_keyword) + len(archive_keyword) - if archive_schema[index:].find(archive_keyword) >= 0: + if len(candidates) == 0: + raise RuntimeError("No archive found in schema") + if len(candidates) > 1: raise RuntimeError( "Schema contains multiple archives, please specify archive name explicitly") - return archive_schema[index:index+archive_schema[index:].find("{")].strip() + return candidates[0].name diff --git a/flatdata-py/flatdata/lib/writer.py b/flatdata-py/flatdata/lib/writer.py index 9ad3d08b..62f5e8bd 100644 --- a/flatdata-py/flatdata/lib/writer.py +++ b/flatdata-py/flatdata/lib/writer.py @@ -31,13 +31,10 @@ def main() -> None: raise RuntimeError( "Specified json file %s doesn't exists" % args.json_file) - with open(args.schema, 'r') as schema_file: - schema = schema_file.read() - with open(args.json_file, 'r') as json_file: data = json.load(json_file) - archive_writer = Writer(schema, args.path, args.archive_name) + archive_writer = Writer.from_file(args.schema, args.path, args.archive_name or "") archive_writer.set(args.resource_name, data) archive_writer.finish() diff --git a/flatdata-py/tests/test_writer_imports.py b/flatdata-py/tests/test_writer_imports.py new file mode 100644 index 00000000..4ed7d600 --- /dev/null +++ b/flatdata-py/tests/test_writer_imports.py @@ -0,0 +1,174 @@ +''' + Copyright (c) 2025 HERE Europe B.V. + See the LICENSE file in the root of this project for license details. 
+''' + +import os + +import pytest + +from flatdata.lib.flatdata_writer import Writer +from flatdata.generator.engine import Engine + + +def _write_files(tmpdir, files): + """Write a dict of {relative_path: content} into tmpdir.""" + for rel_path, content in files.items(): + full = os.path.join(tmpdir, rel_path) + os.makedirs(os.path.dirname(full), exist_ok=True) + with open(full, "w") as f: + f.write(content) + + +class TestWriterFromFile: + """Tests for Writer.from_file() with import support.""" + + def test_from_file_with_imports(self, tmp_path): + """Writer.from_file() works with schemas that use imports.""" + _write_files(str(tmp_path), { + "main.flatdata": ''' +import "types.flatdata"; +namespace n{ + archive A { r : vector< S >; } +} +''', + "types.flatdata": ''' +namespace n{ struct S { f : u8 : 8; } } +''' + }) + writer = Writer.from_file( + str(tmp_path / "main.flatdata"), + str(tmp_path / "output"), + ) + assert writer.builder is not None + + def test_from_file_no_imports(self, tmp_path): + """Writer.from_file() works with single-file schemas.""" + _write_files(str(tmp_path), { + "main.flatdata": ''' +namespace n{ + struct S { f : u8 : 8; } + archive A { r : vector< S >; } +} +''' + }) + writer = Writer.from_file( + str(tmp_path / "main.flatdata"), + str(tmp_path / "output"), + ) + assert writer.builder is not None + + def test_from_file_explicit_archive_name(self, tmp_path): + """Writer.from_file() accepts explicit archive name.""" + _write_files(str(tmp_path), { + "main.flatdata": ''' +import "types.flatdata"; +namespace n{ + archive MyArchive { r : vector< S >; } +} +''', + "types.flatdata": ''' +namespace n{ struct S { f : u8 : 8; } } +''' + }) + writer = Writer.from_file( + str(tmp_path / "main.flatdata"), + str(tmp_path / "output"), + archive_name="MyArchive", + ) + assert writer.builder is not None + + def test_from_file_infers_local_archive(self, tmp_path): + """Writer.from_file() infers the local archive even when imports define other 
archives.""" + _write_files(str(tmp_path), { + "main.flatdata": ''' +import "lib.flatdata"; +namespace n{ + archive Main { r : vector< .lib.S >; } +} +''', + "lib.flatdata": ''' +namespace lib{ + struct S { f : u8 : 8; } + archive Lib { r : vector< S >; } +} +''' + }) + writer = Writer.from_file( + str(tmp_path / "main.flatdata"), + str(tmp_path / "output"), + ) + assert writer.builder is not None + + def test_from_file_missing_file(self, tmp_path): + """Writer.from_file() raises RuntimeError for missing schema file.""" + with pytest.raises(RuntimeError): + Writer.from_file( + str(tmp_path / "nonexistent.flatdata"), + str(tmp_path / "output"), + ) + + +class TestWriterStringConstructor: + """Tests for Writer(schema_string) backward compatibility.""" + + def test_string_constructor_works(self, tmp_path): + """Writer(schema_string) still works for self-contained schemas.""" + schema = ''' +namespace n{ + struct S { f : u8 : 8; } + archive A { r : vector< S >; } +} +''' + writer = Writer(schema, str(tmp_path / "output")) + assert writer.builder is not None + + def test_string_constructor_rejects_imports(self, tmp_path): + """Writer(schema_string) raises RuntimeError when schema has imports.""" + schema = ''' +import "types.flatdata"; +namespace n{ + archive A { r : vector< S >; } +} +''' + with pytest.raises(RuntimeError, match="import"): + Writer(schema, str(tmp_path / "output")) + + +class TestInspectorSchemaPattern: + """Verify the inspector's Engine(schema_string) pattern works for embedded schemas.""" + + def test_embedded_schema_renders_module(self): + """Engine(embedded_schema_string) renders correctly — no imports in embedded schemas.""" + schema = ''' +namespace n{ + struct S { f : u8 : 8; } + archive A { r : vector< S >; } +} +''' + engine = Engine(schema) + module, archive_type = engine.render_python_module( + archive_name="A") + assert archive_type is not None + assert hasattr(module, "n_A") + + def test_embedded_schema_from_imports_is_self_contained(self, 
tmp_path): + """Schema embedding produces self-contained schemas with no import statements.""" + _write_files(str(tmp_path), { + "main.flatdata": ''' +import "types.flatdata"; +namespace n{ + archive A { r : vector< S >; } +} +''', + "types.flatdata": ''' +namespace n{ struct S { f : u8 : 8; } } +''' + }) + engine = Engine.from_file(str(tmp_path / "main.flatdata")) + output = engine.render("flatdata") + # Embedded flatdata schema should be self-contained (no imports) + assert "import" not in output + # All types present + assert "S" in output + assert "archive A" in output diff --git a/flatdata-rs/tests/features/src/imports/cross_namespace/main_schema.rs b/flatdata-rs/tests/features/src/imports/cross_namespace/main_schema.rs new file mode 100644 index 00000000..5c709302 --- /dev/null +++ b/flatdata-rs/tests/features/src/imports/cross_namespace/main_schema.rs @@ -0,0 +1,25 @@ +#![allow(dead_code)] + +include!(concat!(env!("OUT_DIR"), "/imports/cross_namespace/main.rs")); + +#[test] +fn test_cross_namespace_imported_enum() { + let storage = flatdata::MemoryResourceStorage::new("/imports_cross_ns"); + + let mut data = flatdata::Vector::::new(); + let item = data.grow(); + item.set_id(7); + item.set_kind(defs::Kind::B); + + let builder = + app::MainBuilder::new(storage.clone()).expect("Failed to create builder"); + builder + .set_entries(&data.as_view()) + .expect("Failed to set entries"); + + let archive = app::Main::open(storage).expect("Failed to open archive"); + let entries = archive.entries(); + assert_eq!(entries.len(), 1); + assert_eq!(entries[0].id(), 7); + assert_eq!(entries[0].kind(), defs::Kind::B); +} diff --git a/flatdata-rs/tests/features/src/imports/cross_namespace/mod.rs b/flatdata-rs/tests/features/src/imports/cross_namespace/mod.rs new file mode 100644 index 00000000..5be68695 --- /dev/null +++ b/flatdata-rs/tests/features/src/imports/cross_namespace/mod.rs @@ -0,0 +1,2 @@ +pub mod other; +pub mod main_schema; diff --git 
a/flatdata-rs/tests/features/src/imports/cross_namespace/other.rs b/flatdata-rs/tests/features/src/imports/cross_namespace/other.rs new file mode 100644 index 00000000..d85de9b9 --- /dev/null +++ b/flatdata-rs/tests/features/src/imports/cross_namespace/other.rs @@ -0,0 +1,3 @@ +#![allow(dead_code)] + +include!(concat!(env!("OUT_DIR"), "/imports/cross_namespace/other.rs")); diff --git a/flatdata-rs/tests/features/src/imports/mod.rs b/flatdata-rs/tests/features/src/imports/mod.rs new file mode 100644 index 00000000..281d799b --- /dev/null +++ b/flatdata-rs/tests/features/src/imports/mod.rs @@ -0,0 +1,2 @@ +pub mod simple; +pub mod cross_namespace; diff --git a/flatdata-rs/tests/features/src/imports/simple/main_schema.rs b/flatdata-rs/tests/features/src/imports/simple/main_schema.rs new file mode 100644 index 00000000..ecccfa36 --- /dev/null +++ b/flatdata-rs/tests/features/src/imports/simple/main_schema.rs @@ -0,0 +1,24 @@ +#![allow(dead_code)] + +include!(concat!(env!("OUT_DIR"), "/imports/simple/main.rs")); + +#[test] +fn test_imported_types_in_archive() { + let storage = flatdata::MemoryResourceStorage::new("/imports_simple"); + + let mut data = flatdata::Vector::::new(); + let item = data.grow(); + item.set_x(42); + item.set_y(100); + + let builder = app::ABuilder::new(storage.clone()).expect("Failed to create builder"); + builder + .set_data(&data.as_view()) + .expect("Failed to set data"); + + let archive = app::A::open(storage).expect("Failed to open archive"); + let data = archive.data(); + assert_eq!(data.len(), 1); + assert_eq!(data[0].x(), 42); + assert_eq!(data[0].y(), 100); +} diff --git a/flatdata-rs/tests/features/src/imports/simple/mod.rs b/flatdata-rs/tests/features/src/imports/simple/mod.rs new file mode 100644 index 00000000..1cfad9f6 --- /dev/null +++ b/flatdata-rs/tests/features/src/imports/simple/mod.rs @@ -0,0 +1,2 @@ +pub mod types; +pub mod main_schema; diff --git a/flatdata-rs/tests/features/src/imports/simple/types.rs 
b/flatdata-rs/tests/features/src/imports/simple/types.rs new file mode 100644 index 00000000..cc5f1840 --- /dev/null +++ b/flatdata-rs/tests/features/src/imports/simple/types.rs @@ -0,0 +1,3 @@ +#![allow(dead_code)] + +include!(concat!(env!("OUT_DIR"), "/imports/simple/types.rs")); diff --git a/flatdata-rs/tests/features/src/lib.rs b/flatdata-rs/tests/features/src/lib.rs index ee9149cb..859d0177 100644 --- a/flatdata-rs/tests/features/src/lib.rs +++ b/flatdata-rs/tests/features/src/lib.rs @@ -4,3 +4,4 @@ pub mod archives; pub mod constants; pub mod enums; pub mod structs; +pub mod imports; diff --git a/test_cases/imports/cross_namespace/main.flatdata b/test_cases/imports/cross_namespace/main.flatdata new file mode 100644 index 00000000..661a4f09 --- /dev/null +++ b/test_cases/imports/cross_namespace/main.flatdata @@ -0,0 +1,10 @@ +import "other.flatdata"; +namespace app { + archive Main { + entries : vector< Entry >; + } + struct Entry { + id : u32 : 32; + kind : .defs.Kind : 8; + } +} diff --git a/test_cases/imports/cross_namespace/other.flatdata b/test_cases/imports/cross_namespace/other.flatdata new file mode 100644 index 00000000..e66fb85c --- /dev/null +++ b/test_cases/imports/cross_namespace/other.flatdata @@ -0,0 +1,7 @@ +namespace defs { + enum Kind : u8 { + A = 0, + B = 1, + C = 2 + } +} diff --git a/test_cases/imports/cyclic/child.flatdata b/test_cases/imports/cyclic/child.flatdata new file mode 100644 index 00000000..a87686f0 --- /dev/null +++ b/test_cases/imports/cyclic/child.flatdata @@ -0,0 +1,6 @@ +import "parent.flatdata"; +namespace m { + struct Item { + value : u64 : 64; + } +} diff --git a/test_cases/imports/cyclic/parent.flatdata b/test_cases/imports/cyclic/parent.flatdata new file mode 100644 index 00000000..8e5b4d24 --- /dev/null +++ b/test_cases/imports/cyclic/parent.flatdata @@ -0,0 +1,6 @@ +import "child.flatdata"; +namespace m { + archive Parent { + items : vector< Item >; + } +} diff --git a/test_cases/imports/diamond/a.flatdata 
b/test_cases/imports/diamond/a.flatdata new file mode 100644 index 00000000..0ca9419b --- /dev/null +++ b/test_cases/imports/diamond/a.flatdata @@ -0,0 +1,7 @@ +import "common.flatdata"; +namespace ext_a { + struct Label { + id : u32 : 32; + kind : .common.Color : 8; + } +} diff --git a/test_cases/imports/diamond/b.flatdata b/test_cases/imports/diamond/b.flatdata new file mode 100644 index 00000000..c118ddf5 --- /dev/null +++ b/test_cases/imports/diamond/b.flatdata @@ -0,0 +1,7 @@ +import "common.flatdata"; +namespace ext_b { + struct Tag { + id : u32 : 32; + kind : .common.Color : 8; + } +} diff --git a/test_cases/imports/diamond/common.flatdata b/test_cases/imports/diamond/common.flatdata new file mode 100644 index 00000000..77277292 --- /dev/null +++ b/test_cases/imports/diamond/common.flatdata @@ -0,0 +1,11 @@ +namespace common { + struct Point { + x : i32 : 32; + y : i32 : 32; + } + enum Color : u8 { + Red = 0, + Green = 1, + Blue = 2 + } +} diff --git a/test_cases/imports/diamond/main.flatdata b/test_cases/imports/diamond/main.flatdata new file mode 100644 index 00000000..a2abeba1 --- /dev/null +++ b/test_cases/imports/diamond/main.flatdata @@ -0,0 +1,9 @@ +import "a.flatdata"; +import "b.flatdata"; +namespace app { + archive Main { + points : vector< .common.Point >; + labels : vector< .ext_a.Label >; + tags : vector< .ext_b.Tag >; + } +} diff --git a/test_cases/imports/nested_path/main.flatdata b/test_cases/imports/nested_path/main.flatdata new file mode 100644 index 00000000..4de3e8d9 --- /dev/null +++ b/test_cases/imports/nested_path/main.flatdata @@ -0,0 +1,6 @@ +import "sub/types.flatdata"; +namespace app { + archive A { + data : vector< .geo.Coord >; + } +} diff --git a/test_cases/imports/nested_path/sub/types.flatdata b/test_cases/imports/nested_path/sub/types.flatdata new file mode 100644 index 00000000..e4e239f7 --- /dev/null +++ b/test_cases/imports/nested_path/sub/types.flatdata @@ -0,0 +1,6 @@ +namespace geo { + struct Coord { + lat : i32 : 32; + 
lon : i32 : 32; + } +} diff --git a/test_cases/imports/simple/main.flatdata b/test_cases/imports/simple/main.flatdata new file mode 100644 index 00000000..39610c3b --- /dev/null +++ b/test_cases/imports/simple/main.flatdata @@ -0,0 +1,6 @@ +import "types.flatdata"; +namespace app { + archive A { + data : vector< .import_types.S >; + } +} diff --git a/test_cases/imports/simple/types.flatdata b/test_cases/imports/simple/types.flatdata new file mode 100644 index 00000000..6edb2afc --- /dev/null +++ b/test_cases/imports/simple/types.flatdata @@ -0,0 +1,6 @@ +namespace import_types { + struct S { + x : u32 : 32; + y : u32 : 32; + } +} diff --git a/test_cases/imports/transitive/lib.flatdata b/test_cases/imports/transitive/lib.flatdata new file mode 100644 index 00000000..5b08fe9c --- /dev/null +++ b/test_cases/imports/transitive/lib.flatdata @@ -0,0 +1,9 @@ +namespace lib { + struct Base { + value : u32 : 32; + } + enum Kind : u8 { + X = 0, + Y = 1 + } +} diff --git a/test_cases/imports/transitive/main.flatdata b/test_cases/imports/transitive/main.flatdata new file mode 100644 index 00000000..df4b7bed --- /dev/null +++ b/test_cases/imports/transitive/main.flatdata @@ -0,0 +1,6 @@ +import "mid.flatdata"; +namespace app { + archive A { + data : vector< .lib.Base >; + } +} diff --git a/test_cases/imports/transitive/mid.flatdata b/test_cases/imports/transitive/mid.flatdata new file mode 100644 index 00000000..f93b500a --- /dev/null +++ b/test_cases/imports/transitive/mid.flatdata @@ -0,0 +1,6 @@ +import "lib.flatdata"; +namespace mid { + struct Wrapper { + kind : .lib.Kind : 8; + } +}