Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -243,6 +243,6 @@ There are unit tests included with the code which also require downloading test
- [ ] Interactive regime (`hvplot`, `bokeh`, `panel`)
- [x] Ghost cells support
- [x] Usage examples
- [ ] Parse the log file with timings
- [x] Parse the log file with timings
- [x] Raw reader
- [x] 3.14-compatible parallel output
Binary file added dist/nt2py-1.5.0-py3-none-any.whl
Binary file not shown.
Binary file added dist/nt2py-1.5.0.tar.gz
Binary file not shown.
2 changes: 1 addition & 1 deletion nt2/__init__.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
__version__ = "1.4.0"
__version__ = "1.5.0"

import nt2.containers.data as nt2_data

Expand Down
8 changes: 8 additions & 0 deletions nt2/containers/data.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ def override(method):
from nt2.utils import ToHumanReadable

import xarray as xr
import pandas as pd

from nt2.utils import (
DetermineDataFormat,
Expand All @@ -26,6 +27,7 @@ def override(method):
from nt2.containers.fields import Fields
from nt2.containers.particles import Particles
from nt2.containers.spectra import Spectra
from nt2.containers.diagnostics import Diagnostics

import nt2.plotters.polar as acc_polar
import nt2.plotters.particles as acc_particles
Expand Down Expand Up @@ -246,6 +248,7 @@ def __init__(
self.__coordinate_system = coord_system

super(Data, self).__init__(path=path, reader=self.__reader, remap=remap)
self.__diagnostics = Diagnostics(path)

def makeMovie(
self,
Expand Down Expand Up @@ -308,6 +311,11 @@ def attrs(self) -> Dict[str, Any]:
"""dict[str, Any]: The attributes of the data."""
return self.__attrs

@property
def diagnostics(self) -> Union[pd.DataFrame, None]:
"""pd.DataFrame or None: The diagnostics output if .out file is found, None otherwise."""
return self.__diagnostics.df

def to_str(self) -> str:
"""str: String representation of the all the enclosed dataframes."""

Expand Down
106 changes: 106 additions & 0 deletions nt2/containers/diagnostics.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,106 @@
from typing import Union
import pandas as pd


class Diagnostics:
    """Parse an Entity simulation log (`.out` file) into a pandas DataFrame.

    The log is scanned for `Step:` / `Time:` headers, per-substep timings
    (converted to nanoseconds), and per-species particle counts with
    optional min/max values when the log reports them.

    Attributes:
        df: One row per step, indexed by step number, with columns `Step`,
            `Time`, one column per timed substep (values in ns),
            `species_<N>` counts, and `species_<N>_min` / `species_<N>_max`
            columns when present in the log. None if no `.out` file is
            found in `path`.
    """

    df: Union[pd.DataFrame, None]

    @staticmethod
    def _to_ns(value: float, unit: str) -> float:
        """Convert `value` expressed in `unit` ("s", "ms", "µs", "ns") to ns.

        Raises:
            ValueError: If `unit` is not a recognized time unit.
        """
        factors = {"s": 1e9, "ms": 1e6, "µs": 1e3, "ns": 1.0}
        if unit not in factors:
            raise ValueError(f"Unknown time unit: {unit}")
        return value * factors[unit]

    def __init__(self, path: str):
        import os
        import logging
        import re

        outfiles = [o for o in os.listdir(path) if o.endswith(".out")]
        if len(outfiles) == 0:
            logging.warning(f"No .out files found in {path}")
            self.df = None
            return

        # only the first .out file found is parsed
        self.outfile = os.path.join(path, outfiles[0])

        with open(self.outfile, "r") as f:
            content = f.read()

        steps = [int(s) for s in re.findall(r"Step:\s+(\d+)\.+\[", content)]
        times = [float(t) for t in re.findall(r"Time:\s+([\d.]+\d)\.+\[", content)]
        raw_substeps = re.findall(r"\s+([A-Za-z]+)\.+([\d.]+)\s+([mµn]?s)", content)
        raw_species = re.findall(
            r"\s+species\s+(\d+)\s+\(.+\)\.+([\deE+-.]+)(\s+\d+\%\s:\s\d+\%\s+)?([\deE+-.]+)?( : )?([\deE+-.]+)?",
            content,
        )

        assert len(steps) == len(times), "Number of steps and times do not match"

        # per-substep timings, converted to ns
        substep_data: dict = {}
        for name, value, unit in raw_substeps:
            substep_data.setdefault(name, []).append(self._to_ns(float(value), unit))

        for key, values in substep_data.items():
            assert len(values) == len(
                steps
            ), f"Number of substep entries for {key} does not match number of steps"

        # particle counts (and optional min/max) per species index
        species_cnt: dict = {}
        species_min: dict = {}
        species_max: dict = {}
        for idx, count, _pct, cnt_min, _sep, cnt_max in raw_species:
            species_cnt.setdefault(idx, []).append(int(float(count)))
            species_min.setdefault(idx, [])
            species_max.setdefault(idx, [])
            # the min/max regex groups are optional: '' when absent, so an
            # unconditional int(float(...)) would raise for logs without them
            if cnt_min:
                species_min[idx].append(int(float(cnt_min)))
            if cnt_max:
                species_max[idx].append(int(float(cnt_max)))

        for key, values in species_cnt.items():
            assert len(values) == len(
                steps
            ), f"Number of species entries for {key} does not match number of steps"
            # min/max must be either absent entirely or reported for every step
            assert (
                len(species_min[key]) == len(steps) or len(species_min[key]) == 0
            ), f"Number of species min entries for {key} does not match number of steps"
            assert (
                len(species_max[key]) == len(steps) or len(species_max[key]) == 0
            ), f"Number of species max entries for {key} does not match number of steps"

        self.df = pd.DataFrame(index=steps)
        self.df["Step"] = steps
        self.df["Time"] = times
        for key, values in substep_data.items():
            self.df[key] = values
        for key, values in species_cnt.items():
            self.df[f"species_{key}"] = values
            # skip min/max columns when the log did not report them; assigning
            # an empty list to a non-empty frame would raise a length error
            if species_min[key]:
                self.df[f"species_{key}_min"] = species_min[key]
            if species_max[key]:
                self.df[f"species_{key}_max"] = species_max[key]
62 changes: 32 additions & 30 deletions pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,37 +1,15 @@
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[project]
name = "nt2py"
dynamic = ["version"]
dependencies = [
"types-setuptools",
"typing_extensions",
"dask[complete]",
"adios2",
"bokeh",
"xarray",
"numpy",
"scipy",
"matplotlib",
"tqdm",
"contourpy",
"typer",
"loky",
]
description = "Post-processing & visualization toolkit for the Entity PIC code"
readme = "README.md"
requires-python = ">=3.8"
license-files = ["LICENSE"]
authors = [{ name = "Hayk", email = "haykh.astro@gmail.com" }]
maintainers = [{ name = "Hayk", email = "haykh.astro@gmail.com" }]
description = "Post-processing & visualization toolkit for the Entity PIC code"
readme = "README.md"
license = { file = "LICENSE" }
classifiers = [
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Science/Research",
"Intended Audience :: Education",
"Topic :: Scientific/Engineering :: Physics",
"Topic :: Scientific/Engineering :: Astronomy",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: BSD License",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.8",
Expand All @@ -40,18 +18,42 @@ classifiers = [
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"Programming Language :: Python :: 3.13",
"Programming Language :: Python :: 3.14",
"Topic :: Scientific/Engineering :: Astronomy",
"Topic :: Scientific/Engineering :: Physics",
]

[project.optional-dependencies]
hdf5 = ["h5py"]
dev = ["black", "pytest"]
dependencies = [
"adios2",
"bokeh",
"contourpy",
"dask[complete]",
"loky",
"matplotlib",
"numpy",
"pandas",
"scipy",
"tqdm",
"typer",
"types-setuptools",
"typing_extensions",
"xarray",
]
dynamic = ["version"]

[project.urls]
Repository = "https://github.com/entity-toolkit/nt2py"

[project.scripts]
nt2 = "nt2.cli.main:app"

[project.optional-dependencies]
dev = ["black", "pytest"]
hdf5 = ["h5py"]

[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[tool.hatch.version]
path = "nt2/__init__.py"

Expand Down