Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -10,13 +10,39 @@
from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
from opentelemetry.instrumentation.crewai.version import __version__
from opentelemetry.semconv._incubating.attributes import (
error_attributes as ErrorAttributes,
gen_ai_attributes as GenAIAttributes,
)
from opentelemetry.semconv_ai import SpanAttributes, TraceloopSpanKindValues, Meters
from .crewai_span_attributes import CrewAISpanAttributes, set_span_attribute

# Minimum CrewAI version supported by this instrumentation.
_instruments = ("crewai >= 0.70.0",)

# GenAI memory semantic convention attribute keys (fallback to string
# literals when the installed semconv package doesn't define them yet).
_GEN_AI_OPERATION_NAME = getattr(GenAIAttributes, "GEN_AI_OPERATION_NAME", "gen_ai.operation.name")
_GEN_AI_PROVIDER_NAME = getattr(GenAIAttributes, "GEN_AI_PROVIDER_NAME", "gen_ai.provider.name")
_GEN_AI_MEMORY_SCOPE = getattr(GenAIAttributes, "GEN_AI_MEMORY_SCOPE", "gen_ai.memory.scope")
_GEN_AI_MEMORY_TYPE = getattr(GenAIAttributes, "GEN_AI_MEMORY_TYPE", "gen_ai.memory.type")
_GEN_AI_MEMORY_QUERY = getattr(GenAIAttributes, "GEN_AI_MEMORY_QUERY", "gen_ai.memory.query")
_GEN_AI_MEMORY_CONTENT = getattr(GenAIAttributes, "GEN_AI_MEMORY_CONTENT", "gen_ai.memory.content")
_GEN_AI_MEMORY_NAMESPACE = getattr(GenAIAttributes, "GEN_AI_MEMORY_NAMESPACE", "gen_ai.memory.namespace")
_GEN_AI_MEMORY_SEARCH_RESULT_COUNT = getattr(
    GenAIAttributes, "GEN_AI_MEMORY_SEARCH_RESULT_COUNT", "gen_ai.memory.search.result.count"
)
_GEN_AI_MEMORY_UPDATE_STRATEGY = getattr(
    GenAIAttributes, "GEN_AI_MEMORY_UPDATE_STRATEGY", "gen_ai.memory.update.strategy"
)
_GEN_AI_MEMORY_IMPORTANCE = getattr(GenAIAttributes, "GEN_AI_MEMORY_IMPORTANCE", "gen_ai.memory.importance")
_ERROR_TYPE = getattr(ErrorAttributes, "ERROR_TYPE", "error.type")

# Value recorded for gen_ai system/provider attributes on memory spans.
_PROVIDER = "crewai"


def _capture_content() -> bool:
"""Check if memory content capture is enabled."""
return os.environ.get("OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT", "").lower() in ("true", "1")


class CrewAIInstrumentor(BaseInstrumentor):

Expand Down Expand Up @@ -44,12 +70,40 @@ def _instrument(self, **kwargs):
wrap_function_wrapper("crewai.llm", "LLM.call",
wrap_llm_call(tracer, duration_histogram, token_histogram))

# Memory operations (crewai.memory.unified_memory.Memory)
try:
wrap_function_wrapper(
"crewai.memory.unified_memory", "Memory.remember",
wrap_memory_remember(tracer, duration_histogram))
wrap_function_wrapper(
"crewai.memory.unified_memory", "Memory.recall",
wrap_memory_recall(tracer, duration_histogram))
wrap_function_wrapper(
"crewai.memory.unified_memory", "Memory.forget",
wrap_memory_forget(tracer, duration_histogram))
wrap_function_wrapper(
"crewai.memory.unified_memory", "Memory.reset",
wrap_memory_reset(tracer, duration_histogram))
except Exception:
# CrewAI versions before unified_memory may not have these classes
pass

def _uninstrument(self, **kwargs):
    """Remove every wrapper installed by ``_instrument``."""
    for target, method_name in (
        ("crewai.crew.Crew", "kickoff"),
        ("crewai.agent.Agent", "execute_task"),
        ("crewai.task.Task", "execute_sync"),
        ("crewai.llm.LLM", "call"),
    ):
        unwrap(target, method_name)

    # Memory unwrap — best-effort: older CrewAI versions have no
    # unified_memory module, and unwrapping may fail if never wrapped.
    try:
        from crewai.memory.unified_memory import Memory as UnifiedMemory
        for method_name in ("remember", "recall", "forget", "reset"):
            unwrap(UnifiedMemory, method_name)
    except Exception:
        pass


def with_tracer_wrapper(func):
"""Helper for providing tracer for wrapper functions."""
Expand Down Expand Up @@ -199,3 +253,229 @@ def _create_metrics(meter: Meter):
)

return token_histogram, duration_histogram


# ---------------------------------------------------------------------------
# Memory operation wrappers — aligned with GenAI memory semantic conventions
# ---------------------------------------------------------------------------


def _infer_memory_scope(instance) -> str:
"""Infer memory scope from the Memory instance or its MemoryScope wrapper."""
# MemoryScope has a _root attribute like "/agent/1" or "/user/123"
root = getattr(instance, "_root", None)
if root:
parts = root.strip("/").split("/")
if parts:
first = parts[0].lower()
if first in ("user", "agent", "session", "team", "global"):
return first
return "agent"
Comment on lines +263 to +273
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🔴 Critical

🧩 Analysis chain

🏁 Script executed:

# First, find the instrumentation file mentioned in the review
find . -name "instrumentation.py" -path "*crewai*" | head -5

Repository: traceloop/openllmetry

Length of output: 168


🏁 Script executed:

# Search for Memory and MemoryScope references in the codebase
rg "class Memory|class MemoryScope" -A 5

Repository: traceloop/openllmetry

Length of output: 47


🏁 Script executed:

# Look for unified_memory references
rg "unified_memory\|MemoryScope\|_root" --type py

Repository: traceloop/openllmetry

Length of output: 47


🏁 Script executed:

# Read the instrumentation.py file to see the complete context and usage
cat -n ./packages/opentelemetry-instrumentation-crewai/opentelemetry/instrumentation/crewai/instrumentation.py | head -300

Repository: traceloop/openllmetry

Length of output: 14435


🏁 Script executed:

# Search for calls to _infer_memory_scope
rg "_infer_memory_scope" --type py -B 3 -A 3

Repository: traceloop/openllmetry

Length of output: 5424


🏁 Script executed:

# Check dependencies for CrewAI version
find . -name "pyproject.toml" -o -name "requirements.txt" | xargs grep -l "crewai"

Repository: traceloop/openllmetry

Length of output: 204


🏁 Script executed:

# Check the crewai dependency version in the CrewAI instrumentation package
cat ./packages/opentelemetry-instrumentation-crewai/pyproject.toml | grep -A 20 "dependencies"

Repository: traceloop/openllmetry

Length of output: 926


🏁 Script executed:

# Now let's search for the Memory class usage and see what object is actually passed to _infer_memory_scope
# Look at the memory wrapper functions
sed -n '300,450p' ./packages/opentelemetry-instrumentation-crewai/opentelemetry/instrumentation/crewai/instrumentation.py

Repository: traceloop/openllmetry

Length of output: 6426


🏁 Script executed:

# Look for tests of the memory instrumentation to see how Memory objects are used
find . -name "*test*" -type f -path "*crewai*" | head -10

Repository: traceloop/openllmetry

Length of output: 234


🏁 Script executed:

# Look for any mocks or references to Memory structure in tests
rg "Memory\|_root\|MemoryScope" --type py -A 2 -B 2 ./packages/opentelemetry-instrumentation-crewai/tests/ 2>/dev/null || echo "No tests directory found"

Repository: traceloop/openllmetry

Length of output: 89


🏁 Script executed:

# Check if there's documentation or comments about Memory structure elsewhere
rg "unified_memory\|MemoryScope" --type py ./packages/opentelemetry-instrumentation-crewai/

Repository: traceloop/openllmetry

Length of output: 47


🏁 Script executed:

# Check the memory instrumentation tests to see how Memory objects are used
cat ./packages/opentelemetry-instrumentation-crewai/tests/test_memory_instrumentation.py

Repository: traceloop/openllmetry

Length of output: 7888


🏁 Script executed:

# Let's check if there's any documentation or comments about Memory's _root attribute
# in the instrumentation code
rg "Memory\|_root" --type py -B 3 -A 3 ./packages/opentelemetry-instrumentation-crewai/

Repository: traceloop/openllmetry

Length of output: 47


🏁 Script executed:

# Let's also check the git history to see if there are comments about this
git log --oneline -n 20 -- ./packages/opentelemetry-instrumentation-crewai/opentelemetry/instrumentation/crewai/instrumentation.py 2>/dev/null || echo "Git history not available"

Repository: traceloop/openllmetry

Length of output: 147


🌐 Web query:

CrewAI Memory class _root attribute MemoryScope unified_memory

💡 Result:

In CrewAI’s unified memory system, MemoryScope is essentially a scoped “view” over a single underlying Memory instance: it limits operations (remember/recall/forget, etc.) to one branch of the scope tree (e.g. /agent/researcher). The private MemoryScope._root attribute is the internal reference to that underlying root Memory object that actually performs the storage/retrieval; you typically shouldn’t access _root directly—use the public API like memory.scope("/path"). [1]

unified_memory is also used by CrewAI as the source_type emitted in memory-related events for this unified system. [1]

If you’re seeing inconsistent behavior/imports (e.g., from crewai import Memory not working), it may be because your installed crewai package version differs from what the docs/repo show; CrewAI community reports mismatches between “separate memory classes” vs the newer unified-memory exports (crewai.memory.unified_memory.Memory, MemoryScope, etc.). [2]

Sources: [1] CrewAI Memory docs (unified memory, MemoryScope, unified_memory events) https://docs.crewai.com/ko/concepts/memory, [2] CrewAI community thread showing unified-memory module exports vs older imports https://community.crewai.com/t/was-getting-error-while-importing-memory-using-the-same-method-displayed-in-docs/7344


🌐 Web query:

crewai 0.80.0 Memory class unified_memory source code

💡 Result:

In crewai==0.80.0 there is no unified_memory module / Memory class under crewai.memory.unified_memory (those paths 404 at the 0.80.0 tag). [2]

The Memory class that exists in 0.80.0 is the base class at:

  • src/crewai/memory/memory.py [1]

Source code (as of tag 0.80.0):

from typing import Any, Dict, Optional, List
from crewai.memory.storage.rag_storage import RAGStorage

class Memory:
    """ Base class for memory, now supporting agent tags and generic metadata.
    """
    def __init__(self, storage: RAGStorage):
        self.storage = storage

    def save(
        self,
        value: Any,
        metadata: Optional[Dict[str, Any]] = None,
        agent: Optional[str] = None,
    ) -> None:
        metadata = metadata or {}
        if agent:
            metadata["agent"] = agent
        self.storage.save(value, metadata)

    def search(self, query: str) -> List[Dict[str, Any]]:
        return self.storage.search(query)
``` [1]

**Sources:** [1] [2]

---

🌐 Web query:

`CrewAI Memory class attributes scope root unified_memory`

💡 Result:

In CrewAIs **unified `Memory`** system:

- **`scope`**: a hierarchical, filesystem-like path that partitions memories (e.g. `/`, `/project/alpha`, `/agent/researcher`). Recalls can be limited to a scope/branch, and `memory.scope("/agent/researcher")` returns a scoped view (a `MemoryScope`) that only reads/writes within that subtree. [1]
- **`root`**: the **root scope is `/`** (the top of the scope tree). [1]  
- **`unified_memory`**: memory operations emit events tagged with **`source_type="unified_memory"`** (useful for filtering memory-related events in listeners). [2]

**Sources:** CrewAI Memory docs (scopes/root + `MemoryScope`) [1], CrewAI Memory docs (events `source_type="unified_memory"`) [2].

---

🌐 Web query:

`crewai unified_memory Memory class version support when added`

💡 Result:

CrewAI's **unified memory `Memory` class** (i.e., `from crewai import Memory`, implemented in `crewai.memory.unified_memory`) was introduced by PR **"New Unified Memory System" (`#4420`)**, which was **merged on Feb 14, 2026**. [1]

The first CrewAI release that explicitly includes this change is the **pre-release `1.10.0a1` (Feb 19, 2026)**, whose release notes list **New Unified Memory System**. [2]

This unified system replaces the older separate memory types (Short/Long-term, Entity, External) with a single `Memory` API, as reflected in the current docs. [1][3]

**Sources**
[1] PR `#4420` (merged Feb 14, 2026) – crewAIInc/crewAI  
[2] GitHub Releases page (shows `1.10.0a1`, Feb 19, 2026, includes "New Unified Memory System") – crewAIInc/crewAI
[3] CrewAI docs: Memory concept page describing the unified `Memory` classdocs.crewai.com

---

🌐 Web query:

`site:github.com crewai memory.unified_memory Memory _root attribute`

💡 Result:

In the current CrewAI codebase, `crewai.memory.unified_memory.Memory` **does not have** a `_root` attribute—it’s a regular Python class (not a Pydantic RootModel). Its internal state is kept in attributes like `_config`, `_storage`, `_llm_instance`, etc. [1]

If youre trying to work with therootof the memory hierarchy, CrewAI models that as a **scope path string**, where the root scope is `"/"` (and APIs take `scope` / `scope_prefix`, e.g., storage `reset(scope_prefix=...)`). [2]

**Sources**: [1], [2]

---



</details>

**Replace `_infer_memory_scope` to extract scope from `kwargs` instead of relying on a non-existent `instance._root` attribute.**

The CrewAI `Memory` class (both in versions 0.80.0 and the current unified_memory system) does **not** have a `_root` attribute. The current implementation will always return `"agent"` as a fallback, silently producing incorrect scope values in telemetry for all non-agent memory operations.

The scope information is already available in the `kwargs` passed to `Memory.remember()`, `Memory.recall()`, `Memory.forget()`, and `Memory.reset()` as the `scope` parameter. Extract and parse the scope from `kwargs` instead:

```python
def _infer_memory_scope(kwargs) -> str:
    """Infer memory scope from the scope kwarg."""
    scope = kwargs.get("scope")
    if scope and isinstance(scope, str):
        parts = scope.strip("/").split("/")
        if parts:
            first = parts[0].lower()
            if first in ("user", "agent", "session", "team", "global"):
                return first
    return "agent"

Then update all call sites to pass kwargs instead of instance:

  • _infer_memory_scope(kwargs) in wrap_memory_remember
  • _infer_memory_scope(kwargs) in wrap_memory_recall
  • _infer_memory_scope(kwargs) in wrap_memory_forget
  • _infer_memory_scope(kwargs) in wrap_memory_reset
🤖 Prompt for AI Agents
Verify each finding against the current code and only fix it if needed.

In
`@packages/opentelemetry-instrumentation-crewai/opentelemetry/instrumentation/crewai/instrumentation.py`
around lines 263 - 273, The _infer_memory_scope function currently reads a
non-existent instance._root and should be replaced to extract the scope from the
kwargs passed into Memory methods: change _infer_memory_scope to accept kwargs,
read scope = kwargs.get("scope"), validate it's a string, split/strip it and
return the first component if it's one of
("user","agent","session","team","global"), otherwise return "agent"; then
update all call sites to pass kwargs (i.e. call _infer_memory_scope(kwargs))
inside wrap_memory_remember, wrap_memory_recall, wrap_memory_forget, and
wrap_memory_reset so telemetry uses the actual scope parameter.



def _infer_memory_type(kwargs) -> str:
"""Infer memory type from kwargs categories hint, defaulting to long_term."""
categories = kwargs.get("categories")
if categories and isinstance(categories, list):
for cat in categories:
cl = str(cat).lower()
if "short" in cl:
return "short_term"
if "entity" in cl:
return "entity"
return "long_term"


def _set_memory_error(span, exc):
    """Flag *span* as errored and attach the exception's qualified type name.

    Returns the type name so callers can reuse it as a metric attribute.
    """
    type_name = type(exc).__qualname__
    span.set_status(Status(StatusCode.ERROR, str(exc)))
    set_span_attribute(span, _ERROR_TYPE, type_name)
    return type_name


def _record_memory_duration(duration_histogram, duration_s, operation, error_type=None):
    """Record a memory operation's duration; no-op when metrics are disabled.

    Negative durations (clock adjustments) are clamped to zero before
    recording. ``error_type``, when present, is added as an attribute.
    """
    if not duration_histogram:
        return
    attributes = {
        _GEN_AI_OPERATION_NAME: operation,
        GenAIAttributes.GEN_AI_SYSTEM: _PROVIDER,
    }
    if error_type:
        attributes[_ERROR_TYPE] = error_type
    duration_histogram.record(max(duration_s, 0.0), attributes=attributes)


def wrap_memory_remember(tracer: Tracer, duration_histogram: Histogram):
    """Wrap ``Memory.remember()`` in an ``update_memory`` span.

    Records GenAI memory semantic-convention attributes, optionally the
    memory content (opt-in via env), and an operation-duration metric.
    """
    def _wrapper(wrapped, instance, args, kwargs):
        operation = "update_memory"
        span_name = f"{operation} {_PROVIDER}"
        error_type = None
        start_time = time.time()
        with tracer.start_as_current_span(
            span_name, kind=SpanKind.CLIENT,
            attributes={GenAIAttributes.GEN_AI_SYSTEM: _PROVIDER}
        ) as span:
            set_span_attribute(span, _GEN_AI_OPERATION_NAME, operation)
            set_span_attribute(span, _GEN_AI_PROVIDER_NAME, _PROVIDER)
            # Scope category is derived from the call's `scope` kwarg; the
            # Memory instance itself carries no usable scope attribute.
            set_span_attribute(span, _GEN_AI_MEMORY_SCOPE, _infer_memory_scope(kwargs))
            set_span_attribute(span, _GEN_AI_MEMORY_TYPE, _infer_memory_type(kwargs))

            # Only report an update strategy the caller made explicit;
            # hardcoding one would emit inaccurate telemetry.
            strategy = kwargs.get("strategy")
            if strategy:
                set_span_attribute(span, _GEN_AI_MEMORY_UPDATE_STRATEGY, str(strategy))

            # Namespace from source kwarg
            source = kwargs.get("source")
            if source:
                set_span_attribute(span, _GEN_AI_MEMORY_NAMESPACE, str(source))

            # Raw scope path (vendor attribute)
            scope = kwargs.get("scope")
            if scope:
                set_span_attribute(span, "crewai.memory.scope_path", str(scope))

            importance = kwargs.get("importance")
            if importance is not None:
                set_span_attribute(span, _GEN_AI_MEMORY_IMPORTANCE, float(importance))

            # Content (opt-in): accept positional *or* keyword `content`.
            if _capture_content():
                content = args[0] if args else kwargs.get("content")
                if content and isinstance(content, str):
                    set_span_attribute(span, _GEN_AI_MEMORY_CONTENT, content)

            try:
                result = wrapped(*args, **kwargs)
                span.set_status(Status(StatusCode.OK))
                # MemoryRecord exposes an id attribute
                if result and hasattr(result, "id"):
                    set_span_attribute(span, "gen_ai.memory.id", str(result.id))
                return result
            except Exception as ex:
                error_type = _set_memory_error(span, ex)
                raise
            finally:
                _record_memory_duration(
                    duration_histogram, time.time() - start_time, operation, error_type
                )
    return _wrapper


def wrap_memory_recall(tracer: Tracer, duration_histogram: Histogram):
    """Wrap ``Memory.recall()`` in a ``search_memory`` span.

    Records scope/type/namespace attributes, the query (opt-in), the
    result count, and an operation-duration metric.
    """
    def _wrapper(wrapped, instance, args, kwargs):
        operation = "search_memory"
        span_name = f"{operation} {_PROVIDER}"
        error_type = None
        start_time = time.time()
        with tracer.start_as_current_span(
            span_name, kind=SpanKind.CLIENT,
            attributes={GenAIAttributes.GEN_AI_SYSTEM: _PROVIDER}
        ) as span:
            set_span_attribute(span, _GEN_AI_OPERATION_NAME, operation)
            set_span_attribute(span, _GEN_AI_PROVIDER_NAME, _PROVIDER)
            # Scope category is derived from the call's `scope` kwarg; the
            # Memory instance itself carries no usable scope attribute.
            set_span_attribute(span, _GEN_AI_MEMORY_SCOPE, _infer_memory_scope(kwargs))
            set_span_attribute(span, _GEN_AI_MEMORY_TYPE, _infer_memory_type(kwargs))

            # Query text is only attached when content capture is enabled.
            query = args[0] if args else kwargs.get("query")
            if _capture_content() and query and isinstance(query, str):
                set_span_attribute(span, _GEN_AI_MEMORY_QUERY, query)

            # Raw scope path (vendor attribute)
            scope = kwargs.get("scope")
            if scope:
                set_span_attribute(span, "crewai.memory.scope_path", str(scope))

            source = kwargs.get("source")
            if source:
                set_span_attribute(span, _GEN_AI_MEMORY_NAMESPACE, str(source))

            try:
                result = wrapped(*args, **kwargs)
                span.set_status(Status(StatusCode.OK))
                if isinstance(result, list):
                    set_span_attribute(span, _GEN_AI_MEMORY_SEARCH_RESULT_COUNT, len(result))
                return result
            except Exception as ex:
                error_type = _set_memory_error(span, ex)
                raise
            finally:
                _record_memory_duration(
                    duration_histogram, time.time() - start_time, operation, error_type
                )
    return _wrapper


def wrap_memory_forget(tracer: Tracer, duration_histogram: Histogram):
    """Wrap ``Memory.forget()`` in a ``delete_memory`` span.

    Records the scope, the single record id when exactly one is targeted,
    the deleted-record count, and an operation-duration metric.
    """
    def _wrapper(wrapped, instance, args, kwargs):
        operation = "delete_memory"
        span_name = f"{operation} {_PROVIDER}"
        error_type = None
        start_time = time.time()
        with tracer.start_as_current_span(
            span_name, kind=SpanKind.CLIENT,
            attributes={GenAIAttributes.GEN_AI_SYSTEM: _PROVIDER}
        ) as span:
            set_span_attribute(span, _GEN_AI_OPERATION_NAME, operation)
            set_span_attribute(span, _GEN_AI_PROVIDER_NAME, _PROVIDER)
            # Scope category is derived from the call's `scope` kwarg; the
            # Memory instance itself carries no usable scope attribute.
            set_span_attribute(span, _GEN_AI_MEMORY_SCOPE, _infer_memory_scope(kwargs))

            # Raw scope path (vendor attribute)
            scope = kwargs.get("scope")
            if scope:
                set_span_attribute(span, "crewai.memory.scope_path", str(scope))

            # Attach a single id only when the deletion targets one record.
            record_ids = kwargs.get("record_ids")
            if record_ids and isinstance(record_ids, list) and len(record_ids) == 1:
                set_span_attribute(span, "gen_ai.memory.id", str(record_ids[0]))

            try:
                result = wrapped(*args, **kwargs)
                span.set_status(Status(StatusCode.OK))
                # forget() returns the number of deleted records
                if isinstance(result, int):
                    set_span_attribute(span, "crewai.memory.deleted_count", result)
                return result
            except Exception as ex:
                error_type = _set_memory_error(span, ex)
                raise
            finally:
                _record_memory_duration(
                    duration_histogram, time.time() - start_time, operation, error_type
                )
    return _wrapper


def wrap_memory_reset(tracer: Tracer, duration_histogram: Histogram):
    """Wrap ``Memory.reset()`` in a ``delete_memory`` span (scope-level wipe).

    Marks the span with ``crewai.memory.reset`` so a wipe is distinguishable
    from a targeted ``forget()``, and records an operation-duration metric.
    """
    def _wrapper(wrapped, instance, args, kwargs):
        operation = "delete_memory"
        span_name = f"{operation} {_PROVIDER}"
        error_type = None
        start_time = time.time()
        with tracer.start_as_current_span(
            span_name, kind=SpanKind.CLIENT,
            attributes={GenAIAttributes.GEN_AI_SYSTEM: _PROVIDER}
        ) as span:
            set_span_attribute(span, _GEN_AI_OPERATION_NAME, operation)
            set_span_attribute(span, _GEN_AI_PROVIDER_NAME, _PROVIDER)
            # Scope category is derived from the call's `scope` kwarg; the
            # Memory instance itself carries no usable scope attribute.
            set_span_attribute(span, _GEN_AI_MEMORY_SCOPE, _infer_memory_scope(kwargs))
            set_span_attribute(span, "crewai.memory.reset", True)

            # Raw scope path (vendor attribute)
            scope = kwargs.get("scope")
            if scope:
                set_span_attribute(span, "crewai.memory.scope_path", str(scope))

            try:
                result = wrapped(*args, **kwargs)
                span.set_status(Status(StatusCode.OK))
                return result
            except Exception as ex:
                error_type = _set_memory_error(span, ex)
                raise
            finally:
                _record_memory_duration(
                    duration_histogram, time.time() - start_time, operation, error_type
                )
    return _wrapper
Loading