diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000..40153a2 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,62 @@ +name: Release + +on: + # Trigger when a release is published via GitHub UI + release: + types: [published] + +permissions: + contents: write + +jobs: + build-and-attach: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3.12" + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install build + + - name: Build package + run: python -m build + + - name: Upload release artifacts + uses: softprops/action-gh-release@v1 + with: + files: | + dist/* + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + # Optional: Publish to PyPI (uncomment when ready) + # publish-pypi: + # needs: build-and-attach + # runs-on: ubuntu-latest + # steps: + # - uses: actions/checkout@v4 + # + # - name: Set up Python + # uses: actions/setup-python@v5 + # with: + # python-version: "3.12" + # + # - name: Install dependencies + # run: | + # python -m pip install --upgrade pip + # pip install build twine + # + # - name: Build package + # run: python -m build + # + # - name: Publish to PyPI + # env: + # TWINE_USERNAME: __token__ + # TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }} + # run: twine upload dist/* diff --git a/CHANGELOG.md b/CHANGELOG.md index ae905df..df46649 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,21 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
+## [3.0.1] - 2026-01-23 + +### Fixed + +- `dataprov-visualize`: Nested provenance bundles now rendered as subgraphs +- `dataprov-visualize`: Added `--normalize-paths` option to handle path prefix mismatches +- `dataprov-visualize`: Fixed tool name lookup to support both `dataprov:name` and `dataprov:toolName` +- `dataprov-report`: Inputs with nested provenance now show bundle contents + +### Added + +- `dataprov-visualize`: `--flatten-bundles` option to hide nested bundles +- `dataprov-report`: `--flatten-bundles` option to hide nested bundles +- Dashed "provenance" edges in DOT output connecting bundle outputs to main chain + ## [3.0.0] - 2025-12-10 Initial public release. @@ -37,4 +52,5 @@ Initial public release. - RDF/Turtle and JSON-LD ontology definitions - W3C PROV-JSON schema included +[3.0.1]: https://github.com/RI-SE/dataprov/releases/tag/v3.0.1 [3.0.0]: https://github.com/RI-SE/dataprov/releases/tag/v3.0.0 diff --git a/README.md b/README.md index 2ee597c..a96b159 100644 --- a/README.md +++ b/README.md @@ -5,6 +5,7 @@ # dataprov [![CI](https://github.com/RI-SE/dataprov/actions/workflows/ci.yml/badge.svg)](https://github.com/RI-SE/dataprov/actions/workflows/ci.yml) +[![GitHub Release](https://img.shields.io/github/v/release/RI-SE/dataprov)](https://github.com/RI-SE/dataprov/releases/latest) [![Python 3.10+](https://img.shields.io/badge/python-3.10%2B-blue.svg)](https://www.python.org/downloads/) [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) [![Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json)](https://github.com/astral-sh/ruff) @@ -33,28 +34,62 @@ A lightweight Python library for tracking data provenance through processing pip ## Contents -- [Installation](#installation) -- [Quick Start](#quick-start) -- [Usage Examples](#usage-examples) - - [Execution Timing](#execution-timing) - - [Provenance File 
Inlining](#provenance-file-inlining) - - [Agent Tracking](#agent-tracking) - - [Attribution (wasAttributedTo)](#attribution-wasattributedto) - - [Custom Ontologies](#custom-ontologies) - - [Environment Capture](#environment-capture) - - [Enhanced Queries](#enhanced-queries) - - [Visualization](#visualization) -- [CLI Tools](#cli-tools) -- [API Reference](#api-reference) -- [Data Readiness Levels (DRL)](#data-readiness-levels-drl) -- [W3C PROV-JSON Format](#w3c-prov-json-format) -- [Dataprov Ontology](#dataprov-ontology) -- [Use Case Examples](#use-case-examples) -- [Comparison with Other Provenance Systems](#comparison-with-other-provenance-systems) -- [Project Structure](#project-structure) -- [Testing](#testing) -- [Schema Version](#schema-version) -- [Acknowledgement](#acknowledgement) +- [dataprov](#dataprov) + - [Features](#features) + - [Contents](#contents) + - [Installation](#installation) + - [Using dataprov in Your Project](#using-dataprov-in-your-project) + - [Local Development Install](#local-development-install) + - [Quick Start](#quick-start) + - [Creating a Provenance Chain](#creating-a-provenance-chain) + - [Loading and Extending Chains](#loading-and-extending-chains) + - [Access the W3C PROV-JSON schema](#access-the-w3c-prov-json-schema) + - [Usage Examples](#usage-examples) + - [Execution Timing](#execution-timing) + - [Provenance File Inlining](#provenance-file-inlining) + - [Agent Tracking](#agent-tracking) + - [Attribution (wasAttributedTo)](#attribution-wasattributedto) + - [Basic Usage](#basic-usage) + - [Multiple Files](#multiple-files) + - [Automation Features](#automation-features) + - [Use Case Example](#use-case-example) + - [Custom Ontologies](#custom-ontologies) + - [Define Custom Namespaces](#define-custom-namespaces) + - [Add Custom Properties with Target Prefixes](#add-custom-properties-with-target-prefixes) + - [Add Custom Properties with add\_attribution()](#add-custom-properties-with-add_attribution) + - [Add Top-Level Custom 
Metadata](#add-top-level-custom-metadata) + - [Creating Your Own Ontology](#creating-your-own-ontology) + - [Environment Capture](#environment-capture) + - [Enhanced Queries](#enhanced-queries) + - [Precise Input-Output Mapping](#precise-input-output-mapping) + - [Visualization](#visualization) + - [CLI Tools](#cli-tools) + - [dataprov-new](#dataprov-new) + - [dataprov-visualize](#dataprov-visualize) + - [dataprov-add-attribution](#dataprov-add-attribution) + - [dataprov-report](#dataprov-report) + - [API Reference](#api-reference) + - [ProvenanceChain Class](#provenancechain-class) + - [Class Methods](#class-methods) + - [Instance Methods](#instance-methods) + - [Data Readiness Levels (DRL)](#data-readiness-levels-drl) + - [W3C PROV-JSON Format](#w3c-prov-json-format) + - [Structure Overview](#structure-overview) + - [Example PROV-JSON File](#example-prov-json-file) + - [PROV Bundles](#prov-bundles) + - [Dataprov Ontology](#dataprov-ontology) + - [Key Features](#key-features) + - [Core Properties by Domain](#core-properties-by-domain) + - [Ontology Documentation](#ontology-documentation) + - [Use Case Examples](#use-case-examples) + - [Video Processing Pipelines](#video-processing-pipelines) + - [Linking Multiple Provenance Chains](#linking-multiple-provenance-chains) + - [Comparison with Other Provenance Systems](#comparison-with-other-provenance-systems) + - [W3C PROV-JSON Compatibility](#w3c-prov-json-compatibility) + - [Project Structure](#project-structure) + - [Testing](#testing) + - [Schema Version](#schema-version) + - [Acknowledgement](#acknowledgement) ## Installation @@ -96,7 +131,7 @@ dataprov>=3.0.0 ```bash # Clone the repository -git clone +git clone https://github.com/RI-SE/dataprov.git cd dataprov # Install package with development dependencies using uv @@ -107,7 +142,7 @@ uv sync --dev ```bash # Clone the repository -git clone +git clone https://github.com/RI-SE/dataprov.git cd dataprov # Create and activate virtual environment @@ -770,6 +805,12 
@@ dataprov-visualize provenance.json | dot -Tpng -o provenance.png # Generate SVG dataprov-visualize provenance.json | dot -Tsvg -o provenance.svg + +# Hide nested provenance bundles (show only main chain) +dataprov-visualize provenance.json --flatten-bundles | dot -Tpng -o simple.png + +# Normalize paths to handle path prefix mismatches between steps +dataprov-visualize provenance.json --normalize-paths | dot -Tpng -o normalized.png ``` ### dataprov-add-attribution @@ -811,6 +852,9 @@ Generate HTML report: ```bash # Generate HTML report dataprov-report provenance.json -o report.html + +# Hide nested provenance bundles (show only main chain) +dataprov-report provenance.json --flatten-bundles -o simple_report.html ``` The HTML report includes: @@ -819,6 +863,7 @@ The HTML report includes: - Agent/user information per step - Environment information per step - File checksums and sizes +- Nested provenance bundles for inputs (showing how input files were created) - Interactive styling ## API Reference @@ -960,10 +1005,13 @@ Validate chain integrity and schema compliance. Returns: `tuple` - `(is_valid, list_of_errors)` -**`to_dot()`** +**`to_dot(include_bundles=True, normalize_paths=False)`** Generate GraphViz DOT format visualization. 
+- `include_bundles` (bool): If True (default), render nested provenance bundles as subgraphs +- `normalize_paths` (bool): If True, match entities by filename when full paths don't match (helps with path prefix mismatches between processing steps) + Returns: `str` - DOT format graph ## Data Readiness Levels (DRL) diff --git a/dataprov/__init__.py b/dataprov/__init__.py index 11a4320..f8c7c01 100644 --- a/dataprov/__init__.py +++ b/dataprov/__init__.py @@ -34,7 +34,7 @@ from dataprov.dataprov import ProvenanceChain -__version__ = "3.0.0" +__version__ = "3.0.1" __all__ = ["ProvenanceChain", "get_schema"] diff --git a/dataprov/cli/report.py b/dataprov/cli/report.py index 1a281f3..c36f7a8 100644 --- a/dataprov/cli/report.py +++ b/dataprov/cli/report.py @@ -7,16 +7,90 @@ """ import argparse +import contextlib import sys from datetime import datetime from pathlib import Path -def generate_html_report(chain) -> str: +def _render_nested_bundle(bundle_id: str, bundle_content: dict) -> str: + """Render a nested provenance bundle as HTML. + + Args: + bundle_id: The bundle identifier + bundle_content: The bundle's PROV-JSON content + + Returns: + str: HTML snippet for the nested bundle + """ + parts = [] + parts.append('
\n') + parts.append(f"

Nested Provenance: {bundle_id}

\n") + + # Get activities sorted by step number + activities = [] + for act_id, act in bundle_content.get("activity", {}).items(): + step_num = 0 + if "_" in act_id: + with contextlib.suppress(ValueError): + step_num = int(act_id.split("_")[-1]) + + # Find agent info + tool_name = "unknown" + for assoc in bundle_content.get("wasAssociatedWith", {}).values(): + if assoc.get("prov:activity") == act_id: + agent_id = assoc.get("prov:agent") + agent = bundle_content.get("agent", {}).get(agent_id, {}) + tool_name = agent.get( + "dataprov:name", agent.get("dataprov:toolName", "unknown") + ) + break + + operation = act.get("dataprov:operation", "unknown") + activities.append((step_num, act_id, tool_name, operation)) + + activities.sort(key=lambda x: x[0]) + + # Find inputs and outputs for each activity + for _step_num, act_id, tool_name, operation in activities: + parts.append('
\n') + parts.append(f'
{tool_name}: {operation}
\n') + + # Find inputs (used relationships) + inputs = [] + for usage in bundle_content.get("used", {}).values(): + if usage.get("prov:activity") == act_id: + ent_id = usage.get("prov:entity", "") + path = ent_id.replace("entity:", "") + inputs.append(Path(path).name) + + # Find outputs (wasGeneratedBy relationships) + outputs = [] + for gen in bundle_content.get("wasGeneratedBy", {}).values(): + if gen.get("prov:activity") == act_id: + ent_id = gen.get("prov:entity", "") + path = ent_id.replace("entity:", "") + outputs.append(Path(path).name) + + if inputs: + parts.append(f'
Inputs: {", ".join(inputs)}
\n') + if outputs: + parts.append( + f'
Outputs: {", ".join(outputs)}
\n' + ) + + parts.append("
\n") + + parts.append("
\n") + return "".join(parts) + + +def generate_html_report(chain, include_bundles: bool = True) -> str: """Generate an HTML report from provenance chain. Args: chain: ProvenanceChain instance + include_bundles: If True, show nested provenance bundles for inputs Returns: str: HTML report content @@ -183,6 +257,39 @@ def generate_html_report(chain) -> str: color: #666; font-size: 0.9em; }} + .provenance-info {{ + background-color: #fff8e1; + border-left: 3px solid #ffc107; + padding: 8px; + margin: 5px 0 5px 20px; + font-size: 0.85em; + }} + .provenance-info .prov-label {{ + font-weight: bold; + color: #f57c00; + }} + .nested-bundle {{ + background-color: #fffde7; + border: 1px solid #fff59d; + border-radius: 6px; + padding: 15px; + margin: 10px 0 10px 20px; + }} + .nested-bundle h4 {{ + margin: 0 0 10px 0; + color: #f57c00; + font-size: 1em; + }} + .nested-step {{ + background-color: white; + border-left: 3px solid #ffb74d; + padding: 10px; + margin: 8px 0; + }} + .nested-step-title {{ + font-weight: bold; + color: #ff9800; + }} .footer {{ text-align: center; margin-top: 40px; @@ -344,6 +451,30 @@ def generate_html_report(chain) -> str: html_parts.append( f'
Checksum: {checksum}
\n' ) + + # Show provenance info for this input if available + prov_file = inp.get("provenance_file") + if prov_file and include_bundles: + prov_checksum = inp.get("provenance_file_checksum", "") + html_parts.append('
\n') + html_parts.append( + f' Provenance: {prov_file}\n' + ) + if prov_checksum: + html_parts.append( + f'
Checksum: {prov_checksum}
\n' + ) + + # If bundle data is available, render nested steps + if prov_file.startswith("bundle:"): + bundle_content = chain.data.get("bundle", {}).get(prov_file, {}) + if bundle_content: + html_parts.append( + _render_nested_bundle(prov_file, bundle_content) + ) + + html_parts.append("
\n") + html_parts.append(" \n") html_parts.append(" \n") @@ -433,6 +564,12 @@ def main(): "-o", "--output", help="Output HTML file (default: stdout)", default=None ) + parser.add_argument( + "--flatten-bundles", + action="store_true", + help="Hide nested provenance bundles (only show main chain)", + ) + args = parser.parse_args() # Validate input file @@ -452,7 +589,9 @@ def main(): # Generate HTML report try: - html_report = generate_html_report(chain) + html_report = generate_html_report( + chain, include_bundles=not args.flatten_bundles + ) except Exception as e: print(f"Error generating HTML report: {e}", file=sys.stderr) return 1 diff --git a/dataprov/cli/visualize.py b/dataprov/cli/visualize.py index 6b5e30d..97917a6 100644 --- a/dataprov/cli/visualize.py +++ b/dataprov/cli/visualize.py @@ -39,6 +39,19 @@ def main(): default=None, ) + parser.add_argument( + "--flatten-bundles", + action="store_true", + help="Hide nested bundles (only show main chain)", + ) + + parser.add_argument( + "--normalize-paths", + action="store_true", + help="Match entities by filename when full paths don't match " + "(helps with path prefix mismatches between processing steps)", + ) + args = parser.parse_args() # Validate input file @@ -58,7 +71,10 @@ def main(): # Generate DOT graph try: - dot_graph = chain.to_dot() + dot_graph = chain.to_dot( + include_bundles=not args.flatten_bundles, + normalize_paths=args.normalize_paths, + ) except Exception as e: print(f"Error generating DOT graph: {e}", file=sys.stderr) return 1 diff --git a/dataprov/dataprov.py b/dataprov/dataprov.py index dc17c8f..1f9be2b 100644 --- a/dataprov/dataprov.py +++ b/dataprov/dataprov.py @@ -1512,34 +1512,65 @@ def validate(self) -> tuple[bool, list[str]]: return (len(errors) == 0, errors) - def to_dot(self) -> str: + def to_dot( + self, include_bundles: bool = True, normalize_paths: bool = False + ) -> str: """Generate GraphViz DOT format representation of the provenance chain. 
+ Args: + include_bundles: If True, render nested bundles as subgraphs + normalize_paths: If True, match entities by filename when full paths + don't match (helps with path prefix mismatches between steps) + Returns: str: DOT format graph representation """ + + def escape_dot(s: str) -> str: + """Escape string for DOT format.""" + return s.replace("\\", "\\\\").replace('"', '\\"') + + def node_id(path: str) -> str: + """Generate node ID, optionally normalizing paths.""" + if normalize_paths: + return Path(path).name + return path + lines = ["digraph provenance {"] lines.append(" rankdir=LR;") lines.append(' node [fontname="Arial"];') lines.append(' edge [fontname="Arial"];') lines.append("") - # Collect all unique files + # Collect all unique files from main chain files = set() for entity_id, entity in self.data.get("entity", {}).items(): if entity.get("prov:type") == "dataprov:DataFile": file_path = entity_id.replace("entity:", "") files.add(file_path) - # Add file nodes + # Build path normalization map if enabled + # Maps normalized name -> set of full paths + path_map: dict[str, set[str]] = {} + if normalize_paths: + for f in files: + name = Path(f).name + if name not in path_map: + path_map[name] = set() + path_map[name].add(f) + + # Add file nodes (main chain) lines.append(" // File nodes") + added_nodes = set() for file_path in sorted(files): + nid = node_id(file_path) + if nid in added_nodes: + continue + added_nodes.add(nid) label = Path(file_path).name - # Escape quotes and backslashes for DOT format - escaped_path = file_path.replace("\\", "\\\\").replace('"', '\\"') - escaped_label = label.replace("\\", "\\\\").replace('"', '\\"') lines.append( - f' "{escaped_path}" [shape=box, label="{escaped_label}", style=filled, fillcolor=skyblue];' + f' "{escape_dot(nid)}" [shape=box, label="{escape_dot(label)}", ' + f"style=filled, fillcolor=skyblue];" ) lines.append("") @@ -1552,36 +1583,156 @@ def to_dot(self) -> str: tool_name = step["tool"]["name"] operation = 
step["operation"] - # Escape for DOT format - escaped_tool = tool_name.replace("\\", "\\\\").replace('"', '\\"') - escaped_op = operation.replace("\\", "\\\\").replace('"', '\\"') - # Include DRL if available drl_info = "" if "drl" in step: drl_info = f"\\nDRL: {step['drl']}" - label = f"{escaped_tool}\\n{escaped_op}{drl_info}" + label = f"{escape_dot(tool_name)}\\n{escape_dot(operation)}{drl_info}" lines.append( - f' "step_{step_id}" [shape=ellipse, label="{label}", style=filled, fillcolor=palegreen];' + f' "step_{step_id}" [shape=ellipse, label="{label}", ' + f"style=filled, fillcolor=palegreen];" ) lines.append("") lines.append(" // Edges (data flow)") - # Add edges + # Add edges from main chain for step in steps: step_id = step["step_id"] # Input edges for inp in step["inputs"]: - escaped_path = inp["path"].replace("\\", "\\\\").replace('"', '\\"') - lines.append(f' "{escaped_path}" -> "step_{step_id}";') + nid = node_id(inp["path"]) + lines.append(f' "{escape_dot(nid)}" -> "step_{step_id}";') # Output edges for out in step["outputs"]: - escaped_path = out["path"].replace("\\", "\\\\").replace('"', '\\"') - lines.append(f' "step_{step_id}" -> "{escaped_path}";') + nid = node_id(out["path"]) + lines.append(f' "step_{step_id}" -> "{escape_dot(nid)}";') + + # Add bundles as subgraphs if requested + if include_bundles: + bundles = self.data.get("bundle", {}) + if bundles: + lines.append("") + lines.append(" // Nested provenance bundles") + + for bundle_id, bundle_content in bundles.items(): + # Create subgraph cluster for bundle + cluster_name = bundle_id.replace(":", "_").replace("-", "_") + lines.append("") + lines.append(f" subgraph cluster_{cluster_name} {{") + lines.append(f' label="{escape_dot(bundle_id)}";') + lines.append(" style=filled;") + lines.append(" fillcolor=lightyellow;") + lines.append(' fontname="Arial";') + lines.append("") + + # Add entities within bundle + bundle_files = set() + for ent_id, ent in bundle_content.get("entity", {}).items(): + if 
ent.get("prov:type") == "dataprov:DataFile": + file_path = ent_id.replace("entity:", "") + bundle_files.add(file_path) + + for file_path in sorted(bundle_files): + label = Path(file_path).name + # Prefix node ID with bundle to avoid collisions + b_node_id = f"{bundle_id}:{file_path}" + lines.append( + f' "{escape_dot(b_node_id)}" [shape=box, ' + f'label="{escape_dot(label)}", style=filled, ' + f"fillcolor=lightblue];" + ) + + # Add activity nodes within bundle + for act_id, act in bundle_content.get("activity", {}).items(): + tool_name = "unknown" + operation = act.get("dataprov:operation", "unknown") + + # Find agent for tool name + for assoc in bundle_content.get( + "wasAssociatedWith", {} + ).values(): + if assoc.get("prov:activity") == act_id: + agent_id = assoc.get("prov:agent") + agent = bundle_content.get("agent", {}).get( + agent_id, {} + ) + # Try both attribute names (toolName is used in some formats) + tool_name = agent.get( + "dataprov:name", + agent.get("dataprov:toolName", "unknown"), + ) + break + + b_act_id = f"{bundle_id}:{act_id}" + label = f"{escape_dot(tool_name)}\\n{escape_dot(operation)}" + lines.append( + f' "{escape_dot(b_act_id)}" [shape=ellipse, ' + f'label="{label}", style=filled, fillcolor=lightgreen];' + ) + + lines.append(" }") + + # Add edges within bundle (outside subgraph definition) + lines.append(f" // Edges for {bundle_id}") + for usage in bundle_content.get("used", {}).values(): + act_id = usage.get("prov:activity") + ent_id = usage.get("prov:entity") + if act_id and ent_id: + b_act_id = f"{bundle_id}:{act_id}" + b_ent_id = f"{bundle_id}:{ent_id.replace('entity:', '')}" + lines.append( + f' "{escape_dot(b_ent_id)}" -> ' + f'"{escape_dot(b_act_id)}";' + ) + + for gen in bundle_content.get("wasGeneratedBy", {}).values(): + ent_id = gen.get("prov:entity") + act_id = gen.get("prov:activity") + if act_id and ent_id: + b_act_id = f"{bundle_id}:{act_id}" + b_ent_id = f"{bundle_id}:{ent_id.replace('entity:', '')}" + lines.append( + f' 
"{escape_dot(b_act_id)}" -> ' + f'"{escape_dot(b_ent_id)}";' + ) + + # Add hadProvenance edges (dashed, connecting main chain to bundles) + lines.append("") + lines.append(" // Provenance reference edges") + for usage in self.data.get("used", {}).values(): + prov_ref = usage.get("dataprov:hadProvenance") + if prov_ref and prov_ref.startswith("bundle:"): + # Get the entity this usage refers to + ent_id = usage.get("prov:entity") + if ent_id: + ent_path = ent_id.replace("entity:", "") + nid = node_id(ent_path) + + # Find a representative node in the bundle to connect to + bundle_content = bundles.get(prov_ref, {}) + # Find output entity in bundle (last generated file) + bundle_outputs = [] + for gen in bundle_content.get( + "wasGeneratedBy", {} + ).values(): + out_ent = gen.get("prov:entity") + if out_ent: + bundle_outputs.append( + out_ent.replace("entity:", "") + ) + + if bundle_outputs: + # Connect to the last output of the bundle + target = f"{prov_ref}:{bundle_outputs[-1]}" + lines.append( + f' "{escape_dot(target)}" -> ' + f'"{escape_dot(nid)}" [style=dashed, ' + f'color=gray, label="provenance"];' + ) lines.append("}") return "\n".join(lines) diff --git a/pyproject.toml b/pyproject.toml index 1205f5d..0dc96c1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "dataprov" -version = "3.0.0" +version = "3.0.1" description = "W3C PROV-compliant data provenance tracking library for recording processing chains" readme = "README.md" license = {file = "LICENSE"} diff --git a/uv.lock b/uv.lock index 84ea433..558a241 100644 --- a/uv.lock +++ b/uv.lock @@ -126,7 +126,7 @@ toml = [ [[package]] name = "dataprov" -version = "3.0.0" +version = "3.0.1" source = { editable = "." } [package.optional-dependencies]