-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathanalysis.py
More file actions
165 lines (136 loc) · 5.43 KB
/
analysis.py
File metadata and controls
165 lines (136 loc) · 5.43 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
# analysis.py
from tqdm import trange
import pandas as pd
import numpy as np
from polygen import Poly
from polystat import resolve_target, TARGETS, basic_stats_1d
class ExperimentResult:
    """
    Stores raw trial-level outputs and optional summary statistics.
    Provides convenient accessors for analysis.

    Parameters
    ----------
    raw_results : dict
        Maps each output name to a list of per-trial values. Entries may be
        real scalars, array-likes (lists / np.ndarray), or None (missing).
    summary_level : str
        "none"  -> raw data only (no summary computed)
        "basic" -> mean, std, min, max, median
        "full"  -> full polystat.basic_stats_1d statistics
    meta : dict, optional
        Experiment metadata (degree, distribution, trial count, tolerances).

    Raises
    ------
    ValueError
        If summary_level is not "none" and not "basic"/"full".
    """

    def __init__(self, raw_results, summary_level="none", meta=None):
        self.raw = raw_results
        self._summary = {}
        self._summary_level = summary_level
        self.meta = meta or {}  # store degree, dist, trials, etc.
        if summary_level != "none":
            self._compute_summary(level=summary_level)

    def _is_scalar_list(self, values):
        """Return True if every non-None trial value is a real scalar.

        Note: an empty or all-None list also counts as scalar; the resulting
        empty array is handled downstream in _compute_summary.
        """
        return all(
            v is None or isinstance(v, (int, float, np.integer, np.floating))
            for v in values
        )

    def _is_vector_list(self, values):
        """Return True if at least one trial value is a list/array of numbers."""
        return any(isinstance(v, (list, np.ndarray)) for v in values)

    def _compute_summary(self, level="basic"):
        # Validate up front: previously an unknown level was only rejected
        # inside the per-output loop, so it slipped through when raw was empty.
        if level not in ("basic", "full"):
            raise ValueError(f"Unknown summary level: {level}")
        self._summary = {}
        for name, values in self.raw.items():
            # Case 1: scalar data -> drop Nones, cast to float
            if self._is_scalar_list(values):
                arr = pd.Series(values).dropna().to_numpy(dtype=float)
            # Case 2: vector-valued data -> flatten all non-None entries
            elif self._is_vector_list(values):
                parts = [np.atleast_1d(v).ravel() for v in values if v is not None]
                # Guard: np.concatenate raises on an empty sequence
                arr = np.concatenate(parts) if parts else np.empty(0)
                # Skip complex arrays for now
                if np.iscomplexobj(arr):
                    self._summary[name] = {"note": "complex data — skipped flatten stats"}
                    continue
            else:
                # Unsupported data type (e.g. strings, dicts)
                self._summary[name] = {"note": "unsupported data type for summary"}
                continue
            # Edge case: nothing left after dropping Nones
            if arr.size == 0:
                self._summary[name] = {}
                continue
            # Compute basic or full stats
            if level == "basic":
                self._summary[name] = {
                    "mean": float(arr.mean()),
                    # sample std needs >= 2 points; NaN otherwise
                    "std": float(arr.std(ddof=1)) if arr.size > 1 else float("nan"),
                    "min": float(arr.min()),
                    "max": float(arr.max()),
                    "median": float(np.median(arr)),
                }
            else:  # level == "full" (validated above)
                self._summary[name] = basic_stats_1d(arr)

    @property
    def summary(self):
        """Return summary statistics dictionary (empty when level is "none")."""
        return self._summary

    def to_dataframe(self):
        """Return raw results as a pandas DataFrame (one column per output)."""
        return pd.DataFrame(self.raw)

    def __repr__(self):
        # Trial count taken from the first output's list (0 if raw is empty).
        n_trials = len(next(iter(self.raw.values()), []))
        out = f"<ExperimentResult: {n_trials} trials>"
        if self._summary:
            out += f" with summary level='{self._summary_level}'"
        return out
class Experiment:
    """
    Experiment runner for random polynomial statistics.
    Example:
    exp = Experiment(poly=Poly(10, Normal(0,1)),
    trials=500,
    outputs=["n_real", "real_gap_max"])
    results = exp.run(summary="basic")
    """

    def __init__(self, poly: Poly, trials: int = 100,
                 outputs=("n_real",), tol: float = 1e-10, cluster_tol: float = 1e-6):
        self.poly = poly
        self.trials = trials
        self.outputs = outputs
        self.tol = tol
        self.cluster_tol = cluster_tol
        # Fail fast on any target name polystat does not recognize.
        for target in self.outputs:
            if target not in TARGETS:
                raise ValueError(f"Unknown output target: {target}")

    # ----------------------------------------------------------
    # Experiment runner
    # ----------------------------------------------------------
    def run(self, summary="none"):
        """
        Run the experiment for the given number of trials.
        Returns an ExperimentResult object with raw trial data and optional summaries.
        Parameters
        ----------
        summary : str
        "none" -> only raw lists of values
        "basic" -> mean, std, min, max, median (scalars and flattened vectors)
        "full" -> full polystat.basic_stats_1d stats
        """
        collected = {name: [] for name in self.outputs}
        for _ in trange(self.trials, desc="Running experiment"):
            sample = self.poly.sample()
            # Shared per-trial cache so targets can reuse intermediate results.
            trial_cache = {}
            for name in self.outputs:
                collected[name].append(
                    resolve_target(name, sample, trial_cache,
                                   tol=self.tol, cluster_tol=self.cluster_tol)
                )
        # Compact distribution summary: collapse to "iid <dist>" when every
        # coefficient distribution prints identically.
        dists = self.poly.dists
        if all(str(d) == str(dists[0]) for d in dists):
            dist_summary = f"iid {dists[0]!r}"
        else:
            dist_summary = repr(dists)
        meta = {
            "degree": self.poly.degree,
            "dist_summary": dist_summary,
            "trials": self.trials,
            "tol": self.tol,
            "cluster_tol": self.cluster_tol,
        }
        return ExperimentResult(collected, summary_level=summary, meta=meta)