From 7932def67f05a12c7aa52242e8b56f006508196a Mon Sep 17 00:00:00 2001 From: Weichen Xu Date: Mon, 20 Apr 2026 21:35:28 +0800 Subject: [PATCH 01/10] init Signed-off-by: Weichen Xu --- examples/filters/mlflow_filter_pipeline.py | 149 +++++++++++++++++++++ 1 file changed, 149 insertions(+) create mode 100644 examples/filters/mlflow_filter_pipeline.py diff --git a/examples/filters/mlflow_filter_pipeline.py b/examples/filters/mlflow_filter_pipeline.py new file mode 100644 index 00000000..14c5f337 --- /dev/null +++ b/examples/filters/mlflow_filter_pipeline.py @@ -0,0 +1,149 @@ +""" +title: MLflow Filter Pipeline +author: open-webui +date: 2026-04-20 +version: 0.0.1 +license: MIT +description: A filter pipeline that uses MLflow for tracing multi-turn chat sessions. +requirements: mlflow>=2.14.0 +""" + +from typing import List, Optional +import os +import uuid + +from utils.pipelines.main import get_last_assistant_message, get_last_user_message +from pydantic import BaseModel +import mlflow +from mlflow.entities import SpanType + + +def get_last_assistant_message_obj(messages: List[dict]) -> dict: + for message in reversed(messages): + if message["role"] == "assistant": + return message + return {} + + +class Pipeline: + class Valves(BaseModel): + pipelines: List[str] = [] + priority: int = 0 + mlflow_tracking_uri: str = "http://localhost:5000" + mlflow_experiment_name: str = "open-webui" + debug: bool = False + + def __init__(self): + self.type = "filter" + self.name = "MLflow Filter" + + self.valves = self.Valves( + **{ + "pipelines": ["*"], + "mlflow_tracking_uri": os.getenv("MLFLOW_TRACKING_URI", "http://localhost:5000"), + "mlflow_experiment_name": os.getenv("MLFLOW_EXPERIMENT_NAME", "open-webui"), + "debug": os.getenv("DEBUG_MODE", "false").lower() == "true", + } + ) + + # Keyed by chat_id: stores inlet snapshot to be consumed in outlet + self.pending_inlets: dict = {} + + def log(self, message: str): + if self.valves.debug: + print(f"[DEBUG] {message}") + + async 
def on_startup(self): + self.log(f"on_startup triggered for {__name__}") + self._setup_mlflow() + + async def on_shutdown(self): + self.log(f"on_shutdown triggered for {__name__}") + + async def on_valves_updated(self): + self.log("Valves updated, resetting MLflow config.") + self._setup_mlflow() + + def _setup_mlflow(self): + mlflow.set_tracking_uri(self.valves.mlflow_tracking_uri) + mlflow.set_experiment(self.valves.mlflow_experiment_name) + self.log( + f"MLflow configured — uri: {self.valves.mlflow_tracking_uri}, " + f"experiment: {self.valves.mlflow_experiment_name}" + ) + + async def inlet(self, body: dict, user: Optional[dict] = None) -> dict: + self.log("MLflow Filter INLET called") + + metadata = body.get("metadata", {}) + chat_id = metadata.get("chat_id", str(uuid.uuid4())) + + if chat_id == "local": + chat_id = f"temporary-session-{metadata.get('session_id')}" + + metadata["chat_id"] = chat_id + body["metadata"] = metadata + + # Snapshot the last user message so outlet can log it as this turn's input + self.pending_inlets[chat_id] = { + "input": get_last_user_message(body.get("messages", [])), + "model": body.get("model"), + "user_email": user.get("email") if user else None, + } + + self.log(f"Stored inlet snapshot for chat_id: {chat_id}") + return body + + async def outlet(self, body: dict, user: Optional[dict] = None) -> dict: + self.log("MLflow Filter OUTLET called") + + metadata = body.get("metadata", {}) + chat_id = metadata.get("chat_id") or body.get("chat_id") + + if chat_id == "local": + chat_id = f"temporary-session-{body.get('session_id')}" + + inlet_data = self.pending_inlets.pop(chat_id, None) + if inlet_data is None: + self.log(f"[WARNING] No inlet snapshot found for chat_id: {chat_id} — skipping trace") + return body + + user_email = inlet_data["user_email"] or (user.get("email") if user else "unknown") + model = inlet_data["model"] or body.get("model", "unknown") + user_input = inlet_data["input"] + + assistant_message = 
get_last_assistant_message(body["messages"]) + assistant_message_obj = get_last_assistant_message_obj(body["messages"]) + + # Extract token usage if available + usage_attrs = {} + if assistant_message_obj: + info = assistant_message_obj.get("usage") or {} + input_tokens = info.get("prompt_eval_count") or info.get("prompt_tokens") + output_tokens = info.get("eval_count") or info.get("completion_tokens") + if input_tokens is not None: + usage_attrs["input_tokens"] = input_tokens + if output_tokens is not None: + usage_attrs["output_tokens"] = output_tokens + + try: + with mlflow.start_span(name="chat_turn", span_type=SpanType.AGENT) as span: + span.set_inputs({"user": user_input}) + span.set_outputs({"response": assistant_message}) + span.set_attribute("model", model) + for k, v in usage_attrs.items(): + span.set_attribute(k, v) + + # Groups all turns of this chat under one session in the MLflow UI + mlflow.update_current_trace( + metadata={ + "mlflow.trace.session": chat_id, + "mlflow.trace.user": user_email, + } + ) + + self.log(f"MLflow trace logged for chat_id: {chat_id}") + except Exception as e: + self.log(f"Failed to log MLflow trace: {e}") + + return body From d38b08cd16fbe8cb385befddd18735636398a598 Mon Sep 17 00:00:00 2001 From: Weichen Xu Date: Mon, 20 Apr 2026 23:15:30 +0800 Subject: [PATCH 02/10] init Signed-off-by: Weichen Xu --- examples/filters/mlflow_filter_pipeline.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/filters/mlflow_filter_pipeline.py b/examples/filters/mlflow_filter_pipeline.py index 14c5f337..11c54183 100644 --- a/examples/filters/mlflow_filter_pipeline.py +++ b/examples/filters/mlflow_filter_pipeline.py @@ -86,7 +86,7 @@ async def inlet(self, body: dict, user: Optional[dict] = None) -> dict: # Snapshot the last user message so outlet can log it as this turn's input self.pending_inlets[chat_id] = { - "input": get_last_user_message(body.get("messages", [])), + "input": 
get_last_user_message(body["messages"]), "model": body.get("model"), "user_email": user.get("email") if user else None, } From 46362a85031d23b1a678f942cf58b0c48c3e51e2 Mon Sep 17 00:00:00 2001 From: Weichen Xu Date: Tue, 21 Apr 2026 16:46:44 +0800 Subject: [PATCH 03/10] update Signed-off-by: Weichen Xu --- examples/filters/mlflow_filter_pipeline.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/examples/filters/mlflow_filter_pipeline.py b/examples/filters/mlflow_filter_pipeline.py index 11c54183..e95f7852 100644 --- a/examples/filters/mlflow_filter_pipeline.py +++ b/examples/filters/mlflow_filter_pipeline.py @@ -16,6 +16,7 @@ from pydantic import BaseModel import mlflow from mlflow.entities import SpanType +from mlflow.tracing.constant import SpanAttributeKey, TokenUsageKey def get_last_assistant_message_obj(messages: List[dict]) -> dict: @@ -116,23 +117,25 @@ async def outlet(self, body: dict, user: Optional[dict] = None) -> dict: assistant_message_obj = get_last_assistant_message_obj(body["messages"]) # Extract token usage if available - usage_attrs = {} + token_usage = {} if assistant_message_obj: info = assistant_message_obj.get("usage") or {} input_tokens = info.get("prompt_eval_count") or info.get("prompt_tokens") output_tokens = info.get("eval_count") or info.get("completion_tokens") if input_tokens is not None: - usage_attrs["input_tokens"] = input_tokens + token_usage[TokenUsageKey.INPUT_TOKENS] = input_tokens if output_tokens is not None: - usage_attrs["output_tokens"] = output_tokens + token_usage[TokenUsageKey.OUTPUT_TOKENS] = output_tokens + if input_tokens is not None and output_tokens is not None: + token_usage[TokenUsageKey.TOTAL_TOKENS] = input_tokens + output_tokens try: with mlflow.start_span(name="chat_turn", span_type=SpanType.AGENT) as span: span.set_inputs({"user": user_input}) span.set_outputs({"response": assistant_message}) span.set_attribute("model", model) - for k, v in usage_attrs.items(): - 
span.set_attribute(k, v) + if token_usage: + span.set_attribute(SpanAttributeKey.CHAT_USAGE, token_usage) # Groups all turns of this chat under one session in the MLflow UI mlflow.update_current_trace( From 14596b8c8982db7f418a640cf2db3f7be0c9d401 Mon Sep 17 00:00:00 2001 From: Weichen Xu Date: Tue, 21 Apr 2026 16:56:02 +0800 Subject: [PATCH 04/10] address comments Signed-off-by: Weichen Xu --- examples/filters/mlflow_filter_pipeline.py | 35 ++++++++++++++-------- 1 file changed, 22 insertions(+), 13 deletions(-) diff --git a/examples/filters/mlflow_filter_pipeline.py b/examples/filters/mlflow_filter_pipeline.py index e95f7852..cdd1282c 100644 --- a/examples/filters/mlflow_filter_pipeline.py +++ b/examples/filters/mlflow_filter_pipeline.py @@ -47,7 +47,8 @@ def __init__(self): } ) - # Keyed by chat_id: stores inlet snapshot to be consumed in outlet + # Keyed by per-request ID (stored in metadata["mlflow_request_id"]) so concurrent + # requests for the same chat don't overwrite each other's snapshots. 
self.pending_inlets: dict = {} def log(self, message: str): @@ -77,38 +78,44 @@ async def inlet(self, body: dict, user: Optional[dict] = None) -> dict: self.log("MLflow Filter INLET called") metadata = body.get("metadata", {}) - chat_id = metadata.get("chat_id", str(uuid.uuid4())) + chat_id = body.get("chat_id") or metadata.get("chat_id") or str(uuid.uuid4()) if chat_id == "local": - chat_id = f"temporary-session-{metadata.get('session_id')}" + session_id = metadata.get("session_id") or body.get("session_id") or str(uuid.uuid4()) + metadata["session_id"] = session_id + body["session_id"] = session_id + chat_id = f"temporary-session-{session_id}" metadata["chat_id"] = chat_id + body["chat_id"] = chat_id body["metadata"] = metadata - # Snapshot the last user message so outlet can log it as this turn's input - self.pending_inlets[chat_id] = { + # Per-request ID so concurrent requests for the same chat don't collide in pending_inlets + request_id = str(uuid.uuid4()) + metadata["mlflow_request_id"] = request_id + + self.pending_inlets[request_id] = { + "chat_id": chat_id, "input": get_last_user_message(body["messages"]), "model": body.get("model"), "user_email": user.get("email") if user else None, } - self.log(f"Stored inlet snapshot for chat_id: {chat_id}") + self.log(f"Stored inlet snapshot for request_id: {request_id}, chat_id: {chat_id}") return body async def outlet(self, body: dict, user: Optional[dict] = None) -> dict: self.log("MLflow Filter OUTLET called") metadata = body.get("metadata", {}) - chat_id = metadata.get("chat_id") or body.get("chat_id") - - if chat_id == "local": - chat_id = f"temporary-session-{body.get('session_id')}" + request_id = metadata.get("mlflow_request_id") - inlet_data = self.pending_inlets.pop(chat_id, None) + inlet_data = self.pending_inlets.pop(request_id, None) if request_id else None if inlet_data is None: - self.log(f"[WARNING] No inlet snapshot found for chat_id: {chat_id} — skipping trace") + self.log(f"[WARNING] No inlet 
snapshot found for request_id: {request_id} — skipping trace") return body + chat_id = inlet_data["chat_id"] user_email = inlet_data["user_email"] or (user.get("email") if user else "unknown") model = inlet_data["model"] or body.get("model", "unknown") user_input = inlet_data["input"] @@ -147,6 +154,8 @@ async def outlet(self, body: dict, user: Optional[dict] = None) -> dict: self.log(f"MLflow trace logged for chat_id: {chat_id}") except Exception as e: - self.log(f"Failed to log MLflow trace: {e}") + warning = f"[WARNING] Failed to log MLflow trace ({type(e).__name__}) for chat_id: {chat_id}: {e}" + print(warning) + self.log(warning) return body From e83ef4b52fd82506be8455ccbc0d03e6c5380892 Mon Sep 17 00:00:00 2001 From: Weichen Xu Date: Mon, 27 Apr 2026 19:02:26 +0800 Subject: [PATCH 05/10] update Signed-off-by: Weichen Xu --- .idea/.gitignore | 5 +++++ .idea/inspectionProfiles/profiles_settings.xml | 6 ++++++ .idea/misc.xml | 7 +++++++ .idea/modules.xml | 8 ++++++++ .idea/pipelines.iml | 14 ++++++++++++++ .idea/vcs.xml | 7 +++++++ examples/filters/mlflow_filter_pipeline.py | 9 +++++---- 7 files changed, 52 insertions(+), 4 deletions(-) create mode 100644 .idea/.gitignore create mode 100644 .idea/inspectionProfiles/profiles_settings.xml create mode 100644 .idea/misc.xml create mode 100644 .idea/modules.xml create mode 100644 .idea/pipelines.iml create mode 100644 .idea/vcs.xml diff --git a/.idea/.gitignore b/.idea/.gitignore new file mode 100644 index 00000000..b58b603f --- /dev/null +++ b/.idea/.gitignore @@ -0,0 +1,5 @@ +# Default ignored files +/shelf/ +/workspace.xml +# Editor-based HTTP Client requests +/httpRequests/ diff --git a/.idea/inspectionProfiles/profiles_settings.xml b/.idea/inspectionProfiles/profiles_settings.xml new file mode 100644 index 00000000..105ce2da --- /dev/null +++ b/.idea/inspectionProfiles/profiles_settings.xml @@ -0,0 +1,6 @@ + + + + \ No newline at end of file diff --git a/.idea/misc.xml b/.idea/misc.xml new file mode 100644 index 
00000000..1817382f --- /dev/null +++ b/.idea/misc.xml @@ -0,0 +1,7 @@ + + + + + + \ No newline at end of file diff --git a/.idea/modules.xml b/.idea/modules.xml new file mode 100644 index 00000000..7f014d4d --- /dev/null +++ b/.idea/modules.xml @@ -0,0 +1,8 @@ + + + + + + + + \ No newline at end of file diff --git a/.idea/pipelines.iml b/.idea/pipelines.iml new file mode 100644 index 00000000..922ee79c --- /dev/null +++ b/.idea/pipelines.iml @@ -0,0 +1,14 @@ + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/vcs.xml b/.idea/vcs.xml new file mode 100644 index 00000000..83067447 --- /dev/null +++ b/.idea/vcs.xml @@ -0,0 +1,7 @@ + + + + + + + \ No newline at end of file diff --git a/examples/filters/mlflow_filter_pipeline.py b/examples/filters/mlflow_filter_pipeline.py index cdd1282c..abbae01c 100644 --- a/examples/filters/mlflow_filter_pipeline.py +++ b/examples/filters/mlflow_filter_pipeline.py @@ -53,7 +53,7 @@ def __init__(self): def log(self, message: str): if self.valves.debug: - print(f"[DEBUG] {message}") + print(f"[DEBUG] {message}", flush=True) async def on_startup(self): self.log(f"on_startup triggered for {__name__}") @@ -81,13 +81,11 @@ async def inlet(self, body: dict, user: Optional[dict] = None) -> dict: chat_id = body.get("chat_id") or metadata.get("chat_id") or str(uuid.uuid4()) if chat_id == "local": - session_id = metadata.get("session_id") or body.get("session_id") or str(uuid.uuid4()) + session_id = metadata.get("session_id") or str(uuid.uuid4()) metadata["session_id"] = session_id - body["session_id"] = session_id chat_id = f"temporary-session-{session_id}" metadata["chat_id"] = chat_id - body["chat_id"] = chat_id body["metadata"] = metadata # Per-request ID so concurrent requests for the same chat don't collide in pending_inlets @@ -159,3 +157,6 @@ async def outlet(self, body: dict, user: Optional[dict] = None) -> dict: self.log(warning) return body + + +print("loaded mlfow filter pipeline.") From 
7355e5713d224a4b0180b47b2f436b35c6811726 Mon Sep 17 00:00:00 2001 From: Weichen Xu Date: Mon, 27 Apr 2026 19:02:51 +0800 Subject: [PATCH 06/10] update Signed-off-by: Weichen Xu --- .gitignore | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index d454a74c..1860d2ed 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,5 @@ __pycache__ +.idea .env /litellm @@ -9,4 +10,4 @@ pipelines/* .DS_Store .venv -venv/ \ No newline at end of file +venv/ From 6c60c38ce0bbf33210e23c6ce65c2dbd82e15aea Mon Sep 17 00:00:00 2001 From: Weichen Xu Date: Mon, 27 Apr 2026 20:04:47 +0800 Subject: [PATCH 07/10] update Signed-off-by: Weichen Xu --- examples/filters/mlflow_filter_pipeline.py | 25 ++++++++-------------- 1 file changed, 9 insertions(+), 16 deletions(-) diff --git a/examples/filters/mlflow_filter_pipeline.py b/examples/filters/mlflow_filter_pipeline.py index abbae01c..cefcdc5f 100644 --- a/examples/filters/mlflow_filter_pipeline.py +++ b/examples/filters/mlflow_filter_pipeline.py @@ -47,13 +47,10 @@ def __init__(self): } ) - # Keyed by per-request ID (stored in metadata["mlflow_request_id"]) so concurrent - # requests for the same chat don't overwrite each other's snapshots. 
self.pending_inlets: dict = {} def log(self, message: str): - if self.valves.debug: - print(f"[DEBUG] {message}", flush=True) + print(f"[DEBUG] {message}", flush=True) async def on_startup(self): self.log(f"on_startup triggered for {__name__}") @@ -88,32 +85,28 @@ async def inlet(self, body: dict, user: Optional[dict] = None) -> dict: metadata["chat_id"] = chat_id body["metadata"] = metadata - # Per-request ID so concurrent requests for the same chat don't collide in pending_inlets - request_id = str(uuid.uuid4()) - metadata["mlflow_request_id"] = request_id - - self.pending_inlets[request_id] = { + self.pending_inlets[chat_id] = { "chat_id": chat_id, "input": get_last_user_message(body["messages"]), "model": body.get("model"), "user_email": user.get("email") if user else None, } - self.log(f"Stored inlet snapshot for request_id: {request_id}, chat_id: {chat_id}") + self.log(f"Stored inlet snapshot for chat_id: {chat_id}") return body async def outlet(self, body: dict, user: Optional[dict] = None) -> dict: self.log("MLflow Filter OUTLET called") - metadata = body.get("metadata", {}) - request_id = metadata.get("mlflow_request_id") + chat_id = body.get("chat_id") or body.get("metadata", {}).get("chat_id") + if not chat_id: + self.log("[WARNING] No chat_id in outlet body — skipping trace") + return body - inlet_data = self.pending_inlets.pop(request_id, None) if request_id else None + inlet_data = self.pending_inlets.pop(chat_id, None) if inlet_data is None: - self.log(f"[WARNING] No inlet snapshot found for request_id: {request_id} — skipping trace") + self.log(f"[WARNING] No inlet snapshot found for chat_id: {chat_id} — skipping trace") return body - - chat_id = inlet_data["chat_id"] user_email = inlet_data["user_email"] or (user.get("email") if user else "unknown") model = inlet_data["model"] or body.get("model", "unknown") user_input = inlet_data["input"] From 7d70ee9a266bf172064601d24d9d091024ed0fe0 Mon Sep 17 00:00:00 2001 From: Weichen Xu Date: Mon, 27 Apr 
2026 20:05:39 +0800 Subject: [PATCH 08/10] clean Signed-off-by: Weichen Xu --- examples/filters/mlflow_filter_pipeline.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/examples/filters/mlflow_filter_pipeline.py b/examples/filters/mlflow_filter_pipeline.py index cefcdc5f..bd746105 100644 --- a/examples/filters/mlflow_filter_pipeline.py +++ b/examples/filters/mlflow_filter_pipeline.py @@ -146,10 +146,6 @@ async def outlet(self, body: dict, user: Optional[dict] = None) -> dict: self.log(f"MLflow trace logged for chat_id: {chat_id}") except Exception as e: warning = f"[WARNING] Failed to log MLflow trace ({type(e).__name__}) for chat_id: {chat_id}: {e}" - print(warning) self.log(warning) return body - - -print("loaded mlfow filter pipeline.") From 8a1a5923b2cfc2298d797d1bec4e8af7a6131b10 Mon Sep 17 00:00:00 2001 From: Weichen Xu Date: Mon, 27 Apr 2026 20:35:32 +0800 Subject: [PATCH 09/10] update Signed-off-by: Weichen Xu --- examples/filters/mlflow_filter_pipeline.py | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/examples/filters/mlflow_filter_pipeline.py b/examples/filters/mlflow_filter_pipeline.py index bd746105..0c878ebc 100644 --- a/examples/filters/mlflow_filter_pipeline.py +++ b/examples/filters/mlflow_filter_pipeline.py @@ -10,6 +10,7 @@ from typing import List, Optional import os +import re import uuid from utils.pipelines.main import get_last_assistant_message, get_last_user_message @@ -19,6 +20,17 @@ from mlflow.tracing.constant import SpanAttributeKey, TokenUsageKey +def extract_latest_user_input(text: str) -> str: + """If text contains a <chat_history> block, return only the last USER: segment inside it.""" + match = re.search(r'<chat_history>(.*?)</chat_history>', text, re.DOTALL) + if match: + history = match.group(1) + user_messages = re.findall(r'USER:\s*(.*?)(?=\s*ASSISTANT:|\s*$)', history, re.DOTALL) + if user_messages: + return user_messages[-1].strip() + return text + + def get_last_assistant_message_obj(messages: List[dict]) -> dict: for message in 
reversed(messages): if message["role"] == "assistant": @@ -87,7 +99,7 @@ async def inlet(self, body: dict, user: Optional[dict] = None) -> dict: self.pending_inlets[chat_id] = { "chat_id": chat_id, - "input": get_last_user_message(body["messages"]), + "input": extract_latest_user_input(get_last_user_message(body["messages"])), "model": body.get("model"), "user_email": user.get("email") if user else None, } From 99b33365fbde709ba0223de9fcd44ae345527653 Mon Sep 17 00:00:00 2001 From: Weichen Xu Date: Mon, 27 Apr 2026 20:37:14 +0800 Subject: [PATCH 10/10] clean Signed-off-by: Weichen Xu --- .idea/.gitignore | 5 ----- .idea/inspectionProfiles/profiles_settings.xml | 6 ------ .idea/misc.xml | 7 ------- .idea/modules.xml | 8 -------- .idea/pipelines.iml | 14 -------------- .idea/vcs.xml | 7 ------- 6 files changed, 47 deletions(-) delete mode 100644 .idea/.gitignore delete mode 100644 .idea/inspectionProfiles/profiles_settings.xml delete mode 100644 .idea/misc.xml delete mode 100644 .idea/modules.xml delete mode 100644 .idea/pipelines.iml delete mode 100644 .idea/vcs.xml diff --git a/.idea/.gitignore b/.idea/.gitignore deleted file mode 100644 index b58b603f..00000000 --- a/.idea/.gitignore +++ /dev/null @@ -1,5 +0,0 @@ -# Default ignored files -/shelf/ -/workspace.xml -# Editor-based HTTP Client requests -/httpRequests/ diff --git a/.idea/inspectionProfiles/profiles_settings.xml b/.idea/inspectionProfiles/profiles_settings.xml deleted file mode 100644 index 105ce2da..00000000 --- a/.idea/inspectionProfiles/profiles_settings.xml +++ /dev/null @@ -1,6 +0,0 @@ - - - - \ No newline at end of file diff --git a/.idea/misc.xml b/.idea/misc.xml deleted file mode 100644 index 1817382f..00000000 --- a/.idea/misc.xml +++ /dev/null @@ -1,7 +0,0 @@ - - - - - - \ No newline at end of file diff --git a/.idea/modules.xml b/.idea/modules.xml deleted file mode 100644 index 7f014d4d..00000000 --- a/.idea/modules.xml +++ /dev/null @@ -1,8 +0,0 @@ - - - - - - - - \ No newline at end of file 
diff --git a/.idea/pipelines.iml b/.idea/pipelines.iml deleted file mode 100644 index 922ee79c..00000000 --- a/.idea/pipelines.iml +++ /dev/null @@ -1,14 +0,0 @@ - - - - - - - - - - - - \ No newline at end of file diff --git a/.idea/vcs.xml b/.idea/vcs.xml deleted file mode 100644 index 83067447..00000000 --- a/.idea/vcs.xml +++ /dev/null @@ -1,7 +0,0 @@ - - - - - - - \ No newline at end of file