diff --git a/cmd/server/area_filter_test.go b/cmd/server/area_filter_test.go new file mode 100644 index 00000000..0d002bbb --- /dev/null +++ b/cmd/server/area_filter_test.go @@ -0,0 +1,269 @@ +package main + +import ( + "encoding/json" + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/gorilla/mux" +) + +func mustExecDB(t *testing.T, db *DB, q string) { + t.Helper() + if _, err := db.conn.Exec(q); err != nil { + t.Fatalf("exec %q: %v", q, err) + } +} + +func TestAreaEntryParsing(t *testing.T) { + raw := `{ + "port": 3000, + "areas": { + "BEL": { + "label": "Belgium", + "polygon": [[50.0, 2.5], [51.5, 2.5], [51.5, 6.4], [50.0, 6.4]] + }, + "BOX": { + "label": "Bounding Box Area", + "latMin": 50.0, "latMax": 51.5, "lonMin": 2.5, "lonMax": 6.4 + } + } + }` + var cfg Config + if err := json.Unmarshal([]byte(raw), &cfg); err != nil { + t.Fatalf("unmarshal: %v", err) + } + if len(cfg.Areas) != 2 { + t.Fatalf("want 2 areas, got %d", len(cfg.Areas)) + } + bel := cfg.Areas["BEL"] + if bel.Label != "Belgium" { + t.Errorf("label: want Belgium, got %q", bel.Label) + } + if len(bel.Polygon) != 4 { + t.Errorf("polygon: want 4 points, got %d", len(bel.Polygon)) + } + box := cfg.Areas["BOX"] + if box.LatMin == nil || *box.LatMin != 50.0 { + t.Error("LatMin not parsed") + } +} + +func TestGetNodePubkeysInArea_Polygon(t *testing.T) { + db := setupTestDBv2(t) + mustExecDB(t, db, `INSERT INTO nodes (public_key, lat, lon) VALUES ('pk-inside', 50.85, 4.35)`) + mustExecDB(t, db, `INSERT INTO nodes (public_key, lat, lon) VALUES ('pk-outside', 48.0, 4.35)`) + mustExecDB(t, db, `INSERT INTO nodes (public_key, lat, lon) VALUES ('pk-nogps', NULL, NULL)`) + mustExecDB(t, db, `INSERT INTO nodes (public_key, lat, lon) VALUES ('pk-zero', 0.0, 0.0)`) + + entry := AreaEntry{ + Label: "Belgium", + Polygon: [][2]float64{{50.0, 2.5}, {51.5, 2.5}, {51.5, 6.4}, {50.0, 6.4}}, + } + pks, err := db.GetNodePubkeysInArea(entry) + if err != nil { + t.Fatalf("GetNodePubkeysInArea: %v", 
err) + } + if len(pks) != 1 || pks[0] != "pk-inside" { + t.Errorf("want [pk-inside], got %v", pks) + } +} + +// newTestStoreWithDB builds a minimal PacketStore wired to the given DB and config. +func newTestStoreWithDB(t *testing.T, db *DB, cfg *Config) *PacketStore { + t.Helper() + return &PacketStore{ + db: db, + config: cfg, + byNode: make(map[string][]*StoreTx), + byTxID: make(map[int]*StoreTx), + byObsID: make(map[int]*StoreObs), + byObserver: make(map[string][]*StoreObs), + byHash: make(map[string]*StoreTx), + byPayloadType: make(map[int][]*StoreTx), + nodeHashes: make(map[string]map[string]bool), + byPathHop: make(map[string][]*StoreTx), + advertPubkeys: make(map[string]int), + rfCache: make(map[string]*cachedResult), + topoCache: make(map[string]*cachedResult), + hashCache: make(map[string]*cachedResult), + collisionCache: make(map[string]*cachedResult), + chanCache: make(map[string]*cachedResult), + distCache: make(map[string]*cachedResult), + subpathCache: make(map[string]*cachedResult), + regionObsCache: make(map[string]map[string]bool), + areaNodeCache: make(map[string]map[string]bool), + rfCacheTTL: 15 * time.Second, + } +} + +func TestResolveAreaNodes_UnknownKey(t *testing.T) { + db := setupTestDBv2(t) + cfg := &Config{Areas: map[string]AreaEntry{ + "BEL": {Label: "Belgium", Polygon: [][2]float64{{50.0, 2.5}, {51.5, 2.5}, {51.5, 6.4}, {50.0, 6.4}}}, + }} + s := newTestStoreWithDB(t, db, cfg) + result := s.resolveAreaNodes("UNKNOWN") + if result != nil { + t.Errorf("want nil for unknown area, got %v", result) + } +} + +func TestResolveAreaNodes_CacheHit(t *testing.T) { + db := setupTestDBv2(t) + mustExecDB(t, db, `INSERT INTO nodes (public_key, lat, lon) VALUES ('pk1', 50.85, 4.35)`) + + cfg := &Config{Areas: map[string]AreaEntry{ + "BEL": {Label: "Belgium", Polygon: [][2]float64{{50.0, 2.5}, {51.5, 2.5}, {51.5, 6.4}, {50.0, 6.4}}}, + }} + s := newTestStoreWithDB(t, db, cfg) + + r1 := s.resolveAreaNodes("BEL") + if !r1["pk1"] { + t.Fatal("pk1 should be 
in area BEL") + } + + r2 := s.resolveAreaNodes("BEL") + if !r2["pk1"] { + t.Fatal("cache hit should still contain pk1") + } +} + +// ingestAdvert adds a synthetic ADVERT packet to the store's in-memory packet list. +func ingestAdvert(t *testing.T, s *PacketStore, hash, decodedJSON string) { + t.Helper() + pt := PayloadADVERT + tx := &StoreTx{ + Hash: hash, + FirstSeen: "2026-01-01T00:00:00Z", + PayloadType: &pt, + DecodedJSON: decodedJSON, + } + s.mu.Lock() + s.packets = append(s.packets, tx) + s.byHash[hash] = tx + s.byPayloadType[PayloadADVERT] = append(s.byPayloadType[PayloadADVERT], tx) + s.mu.Unlock() +} + +func TestFilterPacketsByArea(t *testing.T) { + db := setupTestDBv2(t) + mustExecDB(t, db, `INSERT INTO nodes (public_key, lat, lon) VALUES ('inside-node', 50.85, 4.35)`) + mustExecDB(t, db, `INSERT INTO nodes (public_key, lat, lon) VALUES ('outside-node', 48.0, 4.35)`) + + cfg := &Config{Areas: map[string]AreaEntry{ + "BEL": {Label: "Belgium", Polygon: [][2]float64{{50.0, 2.5}, {51.5, 2.5}, {51.5, 6.4}, {50.0, 6.4}}}, + }} + s := newTestStoreWithDB(t, db, cfg) + + ingestAdvert(t, s, "hash-in", `{"public_key":"inside-node","name":"Inside"}`) + ingestAdvert(t, s, "hash-out", `{"public_key":"outside-node","name":"Outside"}`) + + result := s.QueryPackets(PacketQuery{Limit: 50, Area: "BEL"}) + if result.Total != 1 { + t.Fatalf("want 1 packet in area BEL, got %d (packets: %v)", result.Total, result.Packets) + } +} + +func TestAnalyticsRFAreaFilter(t *testing.T) { + db := setupTestDBv2(t) + mustExecDB(t, db, `INSERT INTO nodes (public_key, lat, lon) VALUES ('inside-node', 50.85, 4.35)`) + + cfg := &Config{Areas: map[string]AreaEntry{ + "BEL": {Label: "Belgium", Polygon: [][2]float64{{50.0, 2.5}, {51.5, 2.5}, {51.5, 6.4}, {50.0, 6.4}}}, + }} + s := newTestStoreWithDB(t, db, cfg) + + result := s.GetAnalyticsRF("", "BEL") + if result == nil { + t.Fatal("GetAnalyticsRF returned nil") + } +} + +func TestAnalyticsChannelsAreaFilter(t *testing.T) { + db := 
setupTestDBv2(t) + cfg := &Config{Areas: map[string]AreaEntry{ + "BEL": {Label: "Belgium", Polygon: [][2]float64{{50.0, 2.5}, {51.5, 2.5}, {51.5, 6.4}, {50.0, 6.4}}}, + }} + s := newTestStoreWithDB(t, db, cfg) + result := s.GetAnalyticsChannels("", "BEL") + if result == nil { + t.Fatal("GetAnalyticsChannels returned nil") + } +} + +func TestGetNodePubkeysInArea_BoundingBox(t *testing.T) { + db := setupTestDBv2(t) + mustExecDB(t, db, `INSERT INTO nodes (public_key, lat, lon) VALUES ('in', 50.5, 5.0)`) + mustExecDB(t, db, `INSERT INTO nodes (public_key, lat, lon) VALUES ('out', 52.0, 5.0)`) + + minLat, maxLat, minLon, maxLon := 50.0, 51.5, 2.5, 6.4 + entry := AreaEntry{LatMin: &minLat, LatMax: &maxLat, LonMin: &minLon, LonMax: &maxLon} + pks, err := db.GetNodePubkeysInArea(entry) + if err != nil { + t.Fatalf("%v", err) + } + if len(pks) != 1 || pks[0] != "in" { + t.Errorf("want [in], got %v", pks) + } +} + +func TestHandleConfigAreas(t *testing.T) { + db := setupTestDBv2(t) + cfg := &Config{Areas: map[string]AreaEntry{ + "BEL": {Label: "Belgium", Polygon: [][2]float64{{50.0, 2.5}, {51.5, 2.5}, {51.5, 6.4}, {50.0, 6.4}}}, + "MST": {Label: "Maastricht"}, + }} + + r := mux.NewRouter() + srv := &Server{db: db, cfg: cfg} + r.HandleFunc("/api/config/areas", srv.handleConfigAreas).Methods("GET") + + req := httptest.NewRequest(http.MethodGet, "/api/config/areas", nil) + w := httptest.NewRecorder() + r.ServeHTTP(w, req) + + if w.Code != 200 { + t.Fatalf("want 200, got %d", w.Code) + } + var result []map[string]string + if err := json.NewDecoder(w.Body).Decode(&result); err != nil { + t.Fatalf("decode: %v", err) + } + if len(result) != 2 { + t.Fatalf("want 2 areas, got %d", len(result)) + } + keys := map[string]bool{} + for _, entry := range result { + keys[entry["key"]] = true + if entry["label"] == "" { + t.Errorf("missing label for key %q", entry["key"]) + } + } + if !keys["BEL"] || !keys["MST"] { + t.Errorf("expected BEL and MST, got %v", keys) + } +} + +func 
TestHandleConfigAreasEmpty(t *testing.T) { + db := setupTestDBv2(t) + cfg := &Config{} + + r := mux.NewRouter() + srv := &Server{db: db, cfg: cfg} + r.HandleFunc("/api/config/areas", srv.handleConfigAreas).Methods("GET") + + req := httptest.NewRequest(http.MethodGet, "/api/config/areas", nil) + w := httptest.NewRecorder() + r.ServeHTTP(w, req) + + var result []interface{} + if err := json.NewDecoder(w.Body).Decode(&result); err != nil { + t.Fatalf("decode: %v", err) + } + if len(result) != 0 { + t.Errorf("want empty array, got %v", result) + } +} diff --git a/cmd/server/clock_skew.go b/cmd/server/clock_skew.go index 0b2c363b..ecb96db8 100644 --- a/cmd/server/clock_skew.go +++ b/cmd/server/clock_skew.go @@ -579,7 +579,12 @@ func (s *PacketStore) getNodeClockSkewLocked(pubkey string) *NodeClockSkew { // GetFleetClockSkew returns clock skew data for all nodes that have skew data. // Must NOT be called with s.mu held. -func (s *PacketStore) GetFleetClockSkew() []*NodeClockSkew { +func (s *PacketStore) GetFleetClockSkew(area string) []*NodeClockSkew { + var areaNodes map[string]bool + if area != "" { + areaNodes = s.resolveAreaNodes(area) + } + s.mu.RLock() defer s.mu.RUnlock() @@ -592,6 +597,9 @@ func (s *PacketStore) GetFleetClockSkew() []*NodeClockSkew { var results []*NodeClockSkew for pubkey := range s.byNode { + if areaNodes != nil && !areaNodes[pubkey] { + continue + } cs := s.getNodeClockSkewLocked(pubkey) if cs == nil { continue diff --git a/cmd/server/collision_details_test.go b/cmd/server/collision_details_test.go index c7dff57f..c59492e2 100644 --- a/cmd/server/collision_details_test.go +++ b/cmd/server/collision_details_test.go @@ -33,7 +33,7 @@ func TestCollisionDetailsIncludeNodePairs(t *testing.T) { store := NewPacketStore(db, nil) store.Load() - result := store.GetAnalyticsHashCollisions("") + result := store.GetAnalyticsHashCollisions("", "") bySize, ok := result["by_size"].(map[string]interface{}) if !ok { t.Fatal("expected by_size map") @@ -109,7 
+109,7 @@ func TestCollisionDetailsEmptyWhenNoCollisions(t *testing.T) { store := NewPacketStore(db, nil) store.Load() - result := store.GetAnalyticsHashCollisions("") + result := store.GetAnalyticsHashCollisions("", "") bySize, ok := result["by_size"].(map[string]interface{}) if !ok { t.Fatal("expected by_size map") diff --git a/cmd/server/config.go b/cmd/server/config.go index 6039d41e..e81f03f2 100644 --- a/cmd/server/config.go +++ b/cmd/server/config.go @@ -11,6 +11,16 @@ import ( "github.com/meshcore-analyzer/geofilter" ) +// AreaEntry defines a geographic area by polygon or bounding box. +type AreaEntry struct { + Label string `json:"label"` + Polygon [][2]float64 `json:"polygon,omitempty"` + LatMin *float64 `json:"latMin,omitempty"` + LatMax *float64 `json:"latMax,omitempty"` + LonMin *float64 `json:"lonMin,omitempty"` + LonMax *float64 `json:"lonMax,omitempty"` +} + // Config mirrors the Node.js config.json structure (read-only fields). type Config struct { Port int `json:"port"` @@ -66,6 +76,8 @@ type Config struct { GeoFilter *GeoFilterConfig `json:"geo_filter,omitempty"` + Areas map[string]AreaEntry `json:"areas,omitempty"` + Timestamps *TimestampConfig `json:"timestamps,omitempty"` DebugAffinity bool `json:"debugAffinity,omitempty"` diff --git a/cmd/server/coverage_test.go b/cmd/server/coverage_test.go index f6228c7f..35380eaa 100644 --- a/cmd/server/coverage_test.go +++ b/cmd/server/coverage_test.go @@ -2159,7 +2159,7 @@ func TestStoreGetBulkHealthWithStore(t *testing.T) { store := NewPacketStore(db, nil) store.Load() - results := store.GetBulkHealth(50, "") + results := store.GetBulkHealth(50, "", "") if len(results) == 0 { t.Error("expected bulk health results") } @@ -2174,7 +2174,7 @@ func TestStoreGetBulkHealthWithStore(t *testing.T) { } t.Run("with region filter", func(t *testing.T) { - results := store.GetBulkHealth(50, "SJC") + results := store.GetBulkHealth(50, "SJC", "") _ = results }) } @@ -2185,7 +2185,7 @@ func 
TestStoreGetAnalyticsHashSizes(t *testing.T) { store := NewPacketStore(db, nil) store.Load() - result := store.GetAnalyticsHashSizes("") + result := store.GetAnalyticsHashSizes("", "") if result["total"] == nil { t.Error("expected total field") } @@ -2196,7 +2196,7 @@ func TestStoreGetAnalyticsHashSizes(t *testing.T) { _ = dist t.Run("with region", func(t *testing.T) { - r := store.GetAnalyticsHashSizes("SJC") + r := store.GetAnalyticsHashSizes("SJC", "") _ = r }) } @@ -2207,7 +2207,7 @@ func TestHashSizesDistributionByRepeatersFiltersRole(t *testing.T) { store := NewPacketStore(db, nil) store.Load() - result := store.GetAnalyticsHashSizes("") + result := store.GetAnalyticsHashSizes("", "") // distributionByRepeaters should only count repeater nodes. // Rich test DB: aabbccdd11223344 = repeater (hash size 2), eeff00112233aabb = companion (hash size 3). @@ -2404,13 +2404,13 @@ func TestStoreGetAnalyticsRFCacheHit(t *testing.T) { store.Load() // First call — cache miss - result1 := store.GetAnalyticsRF("") + result1 := store.GetAnalyticsRF("", "") if result1["totalPackets"] == nil { t.Error("expected totalPackets") } // Second call — should hit cache - result2 := store.GetAnalyticsRF("") + result2 := store.GetAnalyticsRF("", "") if result2["totalPackets"] == nil { t.Error("expected cached totalPackets") } @@ -2429,7 +2429,7 @@ func TestStoreGetAnalyticsTopology(t *testing.T) { store := NewPacketStore(db, nil) store.Load() - result := store.GetAnalyticsTopology("") + result := store.GetAnalyticsTopology("", "") if result == nil { t.Error("expected non-nil result") } @@ -2448,7 +2448,7 @@ func TestStoreGetAnalyticsTopology(t *testing.T) { } t.Run("with region", func(t *testing.T) { - r := store.GetAnalyticsTopology("SJC") + r := store.GetAnalyticsTopology("SJC", "") _ = r }) } @@ -2459,7 +2459,7 @@ func TestStoreGetAnalyticsChannels(t *testing.T) { store := NewPacketStore(db, nil) store.Load() - result := store.GetAnalyticsChannels("") + result := 
store.GetAnalyticsChannels("", "") if _, ok := result["activeChannels"]; !ok { t.Error("expected activeChannels") } @@ -2471,7 +2471,7 @@ func TestStoreGetAnalyticsChannels(t *testing.T) { } t.Run("with region", func(t *testing.T) { - r := store.GetAnalyticsChannels("SJC") + r := store.GetAnalyticsChannels("SJC", "") _ = r }) } @@ -2505,7 +2505,7 @@ func TestStoreGetAnalyticsChannelsNumericHash(t *testing.T) { store := NewPacketStore(db, nil) store.Load() - result := store.GetAnalyticsChannels("") + result := store.GetAnalyticsChannels("", "") channels := result["channels"].([]map[string]interface{}) if len(channels) < 2 { @@ -2551,13 +2551,13 @@ func TestStoreGetAnalyticsDistance(t *testing.T) { store := NewPacketStore(db, nil) store.Load() - result := store.GetAnalyticsDistance("") + result := store.GetAnalyticsDistance("", "") if result == nil { t.Error("expected non-nil result") } t.Run("with region", func(t *testing.T) { - r := store.GetAnalyticsDistance("SJC") + r := store.GetAnalyticsDistance("SJC", "") _ = r }) } @@ -2932,13 +2932,13 @@ func TestCacheHitTopology(t *testing.T) { store.Load() // First call — cache miss - r1 := store.GetAnalyticsTopology("") + r1 := store.GetAnalyticsTopology("", "") if r1 == nil { t.Fatal("expected topology result") } // Second call — cache hit - r2 := store.GetAnalyticsTopology("") + r2 := store.GetAnalyticsTopology("", "") if r2 == nil { t.Fatal("expected cached topology result") } @@ -2956,12 +2956,12 @@ func TestCacheHitHashSizes(t *testing.T) { store := NewPacketStore(db, nil) store.Load() - r1 := store.GetAnalyticsHashSizes("") + r1 := store.GetAnalyticsHashSizes("", "") if r1 == nil { t.Fatal("expected hash sizes result") } - r2 := store.GetAnalyticsHashSizes("") + r2 := store.GetAnalyticsHashSizes("", "") if r2 == nil { t.Fatal("expected cached hash sizes result") } @@ -2979,12 +2979,12 @@ func TestCacheHitChannels(t *testing.T) { store := NewPacketStore(db, nil) store.Load() - r1 := store.GetAnalyticsChannels("") + 
r1 := store.GetAnalyticsChannels("", "") if r1 == nil { t.Fatal("expected channels result") } - r2 := store.GetAnalyticsChannels("") + r2 := store.GetAnalyticsChannels("", "") if r2 == nil { t.Fatal("expected cached channels result") } @@ -3379,7 +3379,7 @@ func TestAnalyticsHashSizesZeroHopSkip(t *testing.T) { store := NewPacketStore(db, nil) store.Load() - result := store.GetAnalyticsHashSizes("") + result := store.GetAnalyticsHashSizes("", "") // The node should appear in multiByteNodes (hashSize=2 from the flood advert) // If the zero-hop bug is present, hashSize would be 1 and the node would NOT diff --git a/cmd/server/db.go b/cmd/server/db.go index aeb09769..51cc1257 100644 --- a/cmd/server/db.go +++ b/cmd/server/db.go @@ -11,6 +11,7 @@ import ( "sync" "time" + "github.com/meshcore-analyzer/geofilter" _ "modernc.org/sqlite" ) @@ -387,6 +388,7 @@ type PacketQuery struct { Since string Until string Region string + Area string // area key; filters by transmitting node's GPS position Node string Channel string // channel_hash filter (#812). Plain names like "#test"/"public" or "enc_" for encrypted Order string // ASC or DESC @@ -2334,6 +2336,44 @@ func (db *DB) GetDroppedPackets(limit int, observerID, nodePubkey string) ([]map return results, nil } +// GetNodePubkeysInArea returns public keys of nodes whose GPS coordinates +// fall inside the given area polygon or bounding box. 
+func (db *DB) GetNodePubkeysInArea(entry AreaEntry) ([]string, error) { + rows, err := db.conn.Query("SELECT public_key, lat, lon FROM nodes WHERE lat IS NOT NULL AND lon IS NOT NULL") + if err != nil { + return nil, err + } + defer rows.Close() + + gf := &geofilter.Config{ + Polygon: entry.Polygon, + LatMin: entry.LatMin, + LatMax: entry.LatMax, + LonMin: entry.LonMin, + LonMax: entry.LonMax, + } + + var result []string + for rows.Next() { + var pk string + var lat, lon sql.NullFloat64 + if err := rows.Scan(&pk, &lat, &lon); err != nil { + continue + } + if !lat.Valid || !lon.Valid { + continue + } + // Skip (0,0) — PassesFilter allows it but these nodes have no real GPS. + if lat.Float64 == 0 && lon.Float64 == 0 { + continue + } + if geofilter.PassesFilter(lat.Float64, lon.Float64, gf) { + result = append(result, pk) + } + } + return result, rows.Err() +} + // GetSignatureDropCount returns the total number of dropped packets. func (db *DB) GetSignatureDropCount() int64 { var count int64 diff --git a/cmd/server/main.go b/cmd/server/main.go index 22dc600e..7c368a4f 100644 --- a/cmd/server/main.go +++ b/cmd/server/main.go @@ -150,6 +150,7 @@ func main() { // In-memory packet store store := NewPacketStore(database, cfg.PacketStore, cfg.CacheTTL) + store.config = cfg if err := store.Load(); err != nil { log.Fatalf("[store] failed to load: %v", err) } diff --git a/cmd/server/routes.go b/cmd/server/routes.go index 70839b52..57f2d64d 100644 --- a/cmd/server/routes.go +++ b/cmd/server/routes.go @@ -117,6 +117,8 @@ func (s *Server) RegisterRoutes(r *mux.Router) { r.HandleFunc("/api/config/theme", s.handleConfigTheme).Methods("GET") r.HandleFunc("/api/config/map", s.handleConfigMap).Methods("GET") r.HandleFunc("/api/config/geo-filter", s.handleConfigGeoFilter).Methods("GET") + r.HandleFunc("/api/config/areas", s.handleConfigAreas).Methods("GET") + r.HandleFunc("/api/config/areas/polygons", s.handleConfigAreasPolygons).Methods("GET") // System endpoints 
r.HandleFunc("/api/health", s.handleHealth).Methods("GET") @@ -294,6 +296,46 @@ func (s *Server) handleConfigClient(w http.ResponseWriter, r *http.Request) { }) } +func (s *Server) handleConfigAreas(w http.ResponseWriter, r *http.Request) { + type areaListEntry struct { + Key string `json:"key"` + Label string `json:"label"` + } + result := make([]areaListEntry, 0, len(s.cfg.Areas)) + for k, v := range s.cfg.Areas { + if v.Label == "" { + continue // skip comment/invalid entries (e.g. "_comment" keys in config) + } + result = append(result, areaListEntry{Key: k, Label: v.Label}) + } + writeJSON(w, result) +} + +func (s *Server) handleConfigAreasPolygons(w http.ResponseWriter, r *http.Request) { + type areaDebugEntry struct { + Key string `json:"key"` + Label string `json:"label"` + Polygon [][2]float64 `json:"polygon,omitempty"` + LatMin *float64 `json:"latMin,omitempty"` + LatMax *float64 `json:"latMax,omitempty"` + LonMin *float64 `json:"lonMin,omitempty"` + LonMax *float64 `json:"lonMax,omitempty"` + } + result := make([]areaDebugEntry, 0, len(s.cfg.Areas)) + for k, v := range s.cfg.Areas { + result = append(result, areaDebugEntry{ + Key: k, + Label: v.Label, + Polygon: v.Polygon, + LatMin: v.LatMin, + LatMax: v.LatMax, + LonMin: v.LonMin, + LonMax: v.LonMax, + }) + } + writeJSON(w, result) +} + func (s *Server) handleConfigRegions(w http.ResponseWriter, r *http.Request) { regions := make(map[string]string) for k, v := range s.cfg.Regions { @@ -791,7 +833,8 @@ func (s *Server) handlePackets(w http.ResponseWriter, r *http.Request) { Until: r.URL.Query().Get("until"), Region: r.URL.Query().Get("region"), Node: r.URL.Query().Get("node"), - Channel: r.URL.Query().Get("channel"), + Channel: r.URL.Query().Get("channel"), + Area: r.URL.Query().Get("area"), Order: "DESC", ExpandObservations: r.URL.Query().Get("expand") == "observations", } @@ -1114,6 +1157,34 @@ func (s *Server) handleNodes(w http.ResponseWriter, r *http.Request) { total = len(filtered) nodes = filtered 
} + // Filter by area + if area := q.Get("area"); area != "" { + var areaNodes map[string]bool + if s.store != nil { + areaNodes = s.store.resolveAreaNodes(area) + } else if s.cfg != nil && s.cfg.Areas != nil { + if entry, ok := s.cfg.Areas[area]; ok { + pks, err := s.db.GetNodePubkeysInArea(entry) + if err == nil { + areaNodes = make(map[string]bool, len(pks)) + for _, pk := range pks { + areaNodes[pk] = true + } + } + } + } + if areaNodes != nil { + filtered := make([]map[string]interface{}, 0, len(nodes)) + for _, n := range nodes { + pk, _ := n["public_key"].(string) + if areaNodes[pk] { + filtered = append(filtered, n) + } + } + nodes = filtered + total = len(filtered) + } + } writeJSON(w, NodeListResponse{Nodes: nodes, Total: total, Counts: counts}) } @@ -1196,7 +1267,8 @@ func (s *Server) handleBulkHealth(w http.ResponseWriter, r *http.Request) { if s.store != nil { region := r.URL.Query().Get("region") - results := s.store.GetBulkHealth(limit, region) + area := r.URL.Query().Get("area") + results := s.store.GetBulkHealth(limit, region, area) // Filter blacklisted nodes if len(s.cfg.NodeBlacklist) > 0 { filtered := make([]map[string]interface{}, 0, len(results)) @@ -1484,15 +1556,17 @@ func (s *Server) handleFleetClockSkew(w http.ResponseWriter, r *http.Request) { writeJSON(w, []*NodeClockSkew{}) return } - writeJSON(w, s.store.GetFleetClockSkew()) + area := r.URL.Query().Get("area") + writeJSON(w, s.store.GetFleetClockSkew(area)) } // --- Analytics Handlers --- func (s *Server) handleAnalyticsRF(w http.ResponseWriter, r *http.Request) { region := r.URL.Query().Get("region") + area := r.URL.Query().Get("area") if s.store != nil { - writeJSON(w, s.store.GetAnalyticsRF(region)) + writeJSON(w, s.store.GetAnalyticsRF(region, area)) return } writeJSON(w, RFAnalyticsResponse{ @@ -1511,8 +1585,9 @@ func (s *Server) handleAnalyticsRF(w http.ResponseWriter, r *http.Request) { func (s *Server) handleAnalyticsTopology(w http.ResponseWriter, r *http.Request) { region := 
r.URL.Query().Get("region") + area := r.URL.Query().Get("area") if s.store != nil { - data := s.store.GetAnalyticsTopology(region) + data := s.store.GetAnalyticsTopology(region, area) if s.cfg != nil && len(s.cfg.NodeBlacklist) > 0 { data = s.filterBlacklistedFromTopology(data) } @@ -1534,7 +1609,8 @@ func (s *Server) handleAnalyticsTopology(w http.ResponseWriter, r *http.Request) func (s *Server) handleAnalyticsChannels(w http.ResponseWriter, r *http.Request) { if s.store != nil { region := r.URL.Query().Get("region") - writeJSON(w, s.store.GetAnalyticsChannels(region)) + area := r.URL.Query().Get("area") + writeJSON(w, s.store.GetAnalyticsChannels(region, area)) return } channels, _ := s.db.GetChannels() @@ -1553,8 +1629,9 @@ func (s *Server) handleAnalyticsChannels(w http.ResponseWriter, r *http.Request) func (s *Server) handleAnalyticsDistance(w http.ResponseWriter, r *http.Request) { region := r.URL.Query().Get("region") + area := r.URL.Query().Get("area") if s.store != nil { - writeJSON(w, s.store.GetAnalyticsDistance(region)) + writeJSON(w, s.store.GetAnalyticsDistance(region, area)) return } writeJSON(w, DistanceAnalyticsResponse{ @@ -1570,7 +1647,8 @@ func (s *Server) handleAnalyticsDistance(w http.ResponseWriter, r *http.Request) func (s *Server) handleAnalyticsHashSizes(w http.ResponseWriter, r *http.Request) { if s.store != nil { region := r.URL.Query().Get("region") - writeJSON(w, s.store.GetAnalyticsHashSizes(region)) + area := r.URL.Query().Get("area") + writeJSON(w, s.store.GetAnalyticsHashSizes(region, area)) return } writeJSON(w, map[string]interface{}{ @@ -1586,7 +1664,8 @@ func (s *Server) handleAnalyticsHashSizes(w http.ResponseWriter, r *http.Request func (s *Server) handleAnalyticsHashCollisions(w http.ResponseWriter, r *http.Request) { if s.store != nil { region := r.URL.Query().Get("region") - writeJSON(w, s.store.GetAnalyticsHashCollisions(region)) + area := r.URL.Query().Get("area") + writeJSON(w, 
s.store.GetAnalyticsHashCollisions(region, area)) return } writeJSON(w, map[string]interface{}{ diff --git a/cmd/server/routes_test.go b/cmd/server/routes_test.go index 4ac15f54..77311db7 100644 --- a/cmd/server/routes_test.go +++ b/cmd/server/routes_test.go @@ -2509,7 +2509,7 @@ func TestHashAnalyticsZeroHopAdvert(t *testing.T) { } // Capture baseline from seed data (bypass cache via computeAnalyticsHashSizes) - baseline := store.computeAnalyticsHashSizes("") + baseline := store.computeAnalyticsHashSizes("", "") baseTotal, _ := baseline["total"].(int) baseDist, _ := baseline["distribution"].(map[string]int) baseDist1 := baseDist["1"] @@ -2535,7 +2535,7 @@ func TestHashAnalyticsZeroHopAdvert(t *testing.T) { store.packets = append(store.packets, tx) store.byPayloadType[4] = append(store.byPayloadType[4], tx) - result := store.computeAnalyticsHashSizes("") + result := store.computeAnalyticsHashSizes("", "") // distributionByRepeaters should include the zero-hop advert's node distByRepeaters, ok := result["distributionByRepeaters"].(map[string]int) @@ -2595,7 +2595,7 @@ func TestAnalyticsHashSizeSameNameDifferentPubkey(t *testing.T) { store.byPayloadType[4] = append(store.byPayloadType[4], tx) } - result := store.GetAnalyticsHashSizes("") + result := store.GetAnalyticsHashSizes("", "") distByRepeaters, ok := result["distributionByRepeaters"].(map[string]int) if !ok { @@ -3528,7 +3528,7 @@ func TestHashCollisionsOnlyRepeaters(t *testing.T) { store.hashSizeInfoAt = time.Now() store.hashSizeInfoMu.Unlock() - result := store.computeHashCollisions("") + result := store.computeHashCollisions("", "") bySize, ok := result["by_size"].(map[string]interface{}) if !ok { diff --git a/cmd/server/store.go b/cmd/server/store.go index 496edac1..a5284d12 100644 --- a/cmd/server/store.go +++ b/cmd/server/store.go @@ -168,6 +168,12 @@ type PacketStore struct { regionObsMu sync.Mutex regionObsCache map[string]map[string]bool regionObsCacheTime time.Time + // Cached area key → node pubkey 
set (30s TTL) + areaNodeMu sync.Mutex + areaNodeCache map[string]map[string]bool + areaNodeCacheTime time.Time + // Full server config — needed for Areas map in resolveAreaNodes. + config *Config // Cached node list + prefix map (rebuilt on demand, shared across analytics) nodeCache []nodeInfo nodePM *prefixMap @@ -401,6 +407,7 @@ func NewPacketStore(db *DB, cfg *PacketStoreConfig, cacheTTLs ...map[string]inte lastSeenTouched: make(map[string]time.Time), clockSkew: NewClockSkewEngine(), useResolvedPathIndex: true, + areaNodeCache: make(map[string]map[string]bool), } ps.initResolvedPathIndex() if cfg != nil { @@ -631,9 +638,14 @@ func (s *PacketStore) Load() error { } } - // Post-load: pick best observation (longest path) for each transmission + // Post-load: pick best observation (longest path) for each transmission, + // then re-index so relay hops from resolved_path land in byNode. + // indexByNode was called earlier (on StoreTx creation) before observations + // were appended, so tx.ResolvedPath was nil at that point — call it again + // now that pickBestObservation has propagated the best path. for _, tx := range s.packets { pickBestObservation(tx) + s.indexByNode(tx) } // Build precomputed subpath index for O(1) analytics queries @@ -865,7 +877,7 @@ func (s *PacketStore) QueryGroupedPackets(q PacketQuery) *PacketResult { } // Cache key covers all filter dimensions. Empty key = no filters. 
- cacheKey := q.Since + "|" + q.Until + "|" + q.Region + "|" + q.Node + "|" + q.Hash + "|" + q.Observer + "|" + q.Channel + cacheKey := q.Since + "|" + q.Until + "|" + q.Region + "|" + q.Area + "|" + q.Node + "|" + q.Hash + "|" + q.Observer + "|" + q.Channel if q.Type != nil { cacheKey += fmt.Sprintf("|t%d", *q.Type) } @@ -2179,7 +2191,7 @@ func (s *PacketStore) MaxObservationID() int { func (s *PacketStore) filterPackets(q PacketQuery) []*StoreTx { // Fast path: single-key index lookups if q.Hash != "" && q.Type == nil && q.Route == nil && q.Observer == "" && - q.Region == "" && q.Node == "" && q.Channel == "" && q.Since == "" && q.Until == "" { + q.Region == "" && q.Area == "" && q.Node == "" && q.Channel == "" && q.Since == "" && q.Until == "" { h := strings.ToLower(q.Hash) tx := s.byHash[h] if tx == nil { @@ -2188,7 +2200,7 @@ func (s *PacketStore) filterPackets(q PacketQuery) []*StoreTx { return []*StoreTx{tx} } if q.Observer != "" && q.Type == nil && q.Route == nil && - q.Region == "" && q.Node == "" && q.Channel == "" && q.Hash == "" && q.Since == "" && q.Until == "" { + q.Region == "" && q.Area == "" && q.Node == "" && q.Channel == "" && q.Hash == "" && q.Since == "" && q.Until == "" { return s.transmissionsForObserver(q.Observer, nil) } @@ -2234,6 +2246,12 @@ func (s *PacketStore) filterPackets(q PacketQuery) []*StoreTx { } } + // Pre-compute area node set. + var areaNodes map[string]bool + if q.Area != "" { + areaNodes = s.resolveAreaNodes(q.Area) + } + // Pre-compute node filter parameters. var nodePK string var nodeHashSet map[string]bool @@ -2251,7 +2269,7 @@ func (s *PacketStore) filterPackets(q PacketQuery) []*StoreTx { // filter is active and an index exists. 
source := s.packets if hasNode && !hasType && !hasRoute && q.Observer == "" && - filterHash == "" && !hasSince && !hasUntil && q.Region == "" && filterChannel == "" { + filterHash == "" && !hasSince && !hasUntil && q.Region == "" && q.Area == "" && filterChannel == "" { if indexed, ok := s.byNode[nodePK]; ok { return indexed } @@ -2297,6 +2315,19 @@ func (s *PacketStore) filterPackets(q PacketQuery) []*StoreTx { return false } } + if areaNodes != nil { + // Only ADVERT packets carry the originator pubkey (public_key/pubKey). + // All other packet types (GRP_TXT, TXT_MSG, REQ, …) have encrypted + // senders so pk == "" and are excluded when an area filter is active. + d := tx.ParsedDecoded() + pk, _ := d["public_key"].(string) + if pk == "" { + pk, _ = d["pubKey"].(string) + } + if pk == "" || !areaNodes[pk] { + return false + } + } if hasNode { if !nodeHashSet[tx.Hash] { return false @@ -2419,6 +2450,44 @@ func (s *PacketStore) fetchAndCacheRegionObs(region string) map[string]bool { return m } +// resolveAreaNodes returns a set of node pubkeys whose GPS coordinates fall +// inside the named area polygon. Returns nil if the area key is not in config. +// Results are cached for 30 seconds. Uses its own mutex so callers holding +// s.mu won't deadlock. 
+func (s *PacketStore) resolveAreaNodes(areaKey string) map[string]bool { + if s.config == nil || s.config.Areas == nil { + return nil + } + entry, ok := s.config.Areas[areaKey] + if !ok { + return nil + } + + s.areaNodeMu.Lock() + defer s.areaNodeMu.Unlock() + + if s.areaNodeCache != nil && time.Since(s.areaNodeCacheTime) < 30*time.Second { + if m, ok := s.areaNodeCache[areaKey]; ok { + return m + } + } else { + s.areaNodeCache = make(map[string]map[string]bool) + s.areaNodeCacheTime = time.Now() + } + + pks, err := s.db.GetNodePubkeysInArea(entry) + if err != nil || len(pks) == 0 { + s.areaNodeCache[areaKey] = nil + return nil + } + m := make(map[string]bool, len(pks)) + for _, pk := range pks { + m[pk] = true + } + s.areaNodeCache[areaKey] = m + return m +} + // enrichObs returns a map with observation fields + transmission fields. func (s *PacketStore) enrichObs(obs *StoreObs) map[string]interface{} { tx := s.byTxID[obs.TransmissionID] @@ -3768,9 +3837,10 @@ func (s *PacketStore) GetChannelMessages(channelHash string, limit, offset int, } // GetAnalyticsChannels returns full channel analytics computed from in-memory packets. 
-func (s *PacketStore) GetAnalyticsChannels(region string) map[string]interface{} { +func (s *PacketStore) GetAnalyticsChannels(region, area string) map[string]interface{} { + cacheKey := region + "|" + area s.cacheMu.Lock() - if cached, ok := s.chanCache[region]; ok && time.Now().Before(cached.expiresAt) { + if cached, ok := s.chanCache[cacheKey]; ok && time.Now().Before(cached.expiresAt) { s.cacheHits++ s.cacheMu.Unlock() return cached.data @@ -3778,16 +3848,16 @@ func (s *PacketStore) GetAnalyticsChannels(region string) map[string]interface{} s.cacheMisses++ s.cacheMu.Unlock() - result := s.computeAnalyticsChannels(region) + result := s.computeAnalyticsChannels(region, area) s.cacheMu.Lock() - s.chanCache[region] = &cachedResult{data: result, expiresAt: time.Now().Add(s.rfCacheTTL)} + s.chanCache[cacheKey] = &cachedResult{data: result, expiresAt: time.Now().Add(s.rfCacheTTL)} s.cacheMu.Unlock() return result } -func (s *PacketStore) computeAnalyticsChannels(region string) map[string]interface{} { +func (s *PacketStore) computeAnalyticsChannels(region, area string) map[string]interface{} { s.mu.RLock() defer s.mu.RUnlock() @@ -3795,6 +3865,10 @@ func (s *PacketStore) computeAnalyticsChannels(region string) map[string]interfa if region != "" { regionObs = s.resolveRegionObservers(region) } + var areaNodes map[string]bool + if area != "" { + areaNodes = s.resolveAreaNodes(area) + } type decodedGrp struct { Type string `json:"type"` @@ -3848,6 +3922,16 @@ func (s *PacketStore) computeAnalyticsChannels(region string) map[string]interfa continue } } + if areaNodes != nil { + d := tx.ParsedDecoded() + pk, _ := d["public_key"].(string) + if pk == "" { + pk, _ = d["pubKey"].(string) + } + if pk == "" || !areaNodes[pk] { + continue + } + } var decoded decodedGrp if json.Unmarshal([]byte(tx.DecodedJSON), &decoded) != nil { @@ -3964,9 +4048,10 @@ func (s *PacketStore) computeAnalyticsChannels(region string) map[string]interfa } // GetAnalyticsRF returns full RF analytics 
computed from in-memory observations. -func (s *PacketStore) GetAnalyticsRF(region string) map[string]interface{} { +func (s *PacketStore) GetAnalyticsRF(region, area string) map[string]interface{} { + cacheKey := region + "|" + area s.cacheMu.Lock() - if cached, ok := s.rfCache[region]; ok && time.Now().Before(cached.expiresAt) { + if cached, ok := s.rfCache[cacheKey]; ok && time.Now().Before(cached.expiresAt) { s.cacheHits++ s.cacheMu.Unlock() return cached.data @@ -3974,16 +4059,16 @@ func (s *PacketStore) GetAnalyticsRF(region string) map[string]interface{} { s.cacheMisses++ s.cacheMu.Unlock() - result := s.computeAnalyticsRF(region) + result := s.computeAnalyticsRF(region, area) s.cacheMu.Lock() - s.rfCache[region] = &cachedResult{data: result, expiresAt: time.Now().Add(s.rfCacheTTL)} + s.rfCache[cacheKey] = &cachedResult{data: result, expiresAt: time.Now().Add(s.rfCacheTTL)} s.cacheMu.Unlock() return result } -func (s *PacketStore) computeAnalyticsRF(region string) map[string]interface{} { +func (s *PacketStore) computeAnalyticsRF(region, area string) map[string]interface{} { s.mu.RLock() defer s.mu.RUnlock() @@ -3993,6 +4078,10 @@ func (s *PacketStore) computeAnalyticsRF(region string) map[string]interface{} { if region != "" { regionObs = s.resolveRegionObservers(region) } + var areaNodes map[string]bool + if area != "" { + areaNodes = s.resolveAreaNodes(area) + } // Collect all observations matching the region estCap := s.totalObs @@ -4024,6 +4113,16 @@ func (s *PacketStore) computeAnalyticsRF(region string) map[string]interface{} { for _, obs := range obsList { totalObs++ tx := s.byTxID[obs.TransmissionID] + if areaNodes != nil && tx != nil { + d := tx.ParsedDecoded() + pk, _ := d["public_key"].(string) + if pk == "" { + pk, _ = d["pubKey"].(string) + } + if pk == "" || !areaNodes[pk] { + continue + } + } hash := "" if tx != nil { hash = tx.Hash @@ -4107,6 +4206,16 @@ func (s *PacketStore) computeAnalyticsRF(region string) map[string]interface{} { } else 
{ // No region: iterate all transmissions and their observations for _, tx := range s.packets { + if areaNodes != nil { + d := tx.ParsedDecoded() + pk, _ := d["public_key"].(string) + if pk == "" { + pk, _ = d["pubKey"].(string) + } + if pk == "" || !areaNodes[pk] { + continue + } + } hash := tx.Hash if hash != "" { regionalHashes[hash] = true @@ -4770,9 +4879,10 @@ func parsePathJSON(pathJSON string) []string { return hops } -func (s *PacketStore) GetAnalyticsTopology(region string) map[string]interface{} { +func (s *PacketStore) GetAnalyticsTopology(region, area string) map[string]interface{} { + cacheKey := region + "|" + area s.cacheMu.Lock() - if cached, ok := s.topoCache[region]; ok && time.Now().Before(cached.expiresAt) { + if cached, ok := s.topoCache[cacheKey]; ok && time.Now().Before(cached.expiresAt) { s.cacheHits++ s.cacheMu.Unlock() return cached.data @@ -4780,16 +4890,16 @@ func (s *PacketStore) GetAnalyticsTopology(region string) map[string]interface{} s.cacheMisses++ s.cacheMu.Unlock() - result := s.computeAnalyticsTopology(region) + result := s.computeAnalyticsTopology(region, area) s.cacheMu.Lock() - s.topoCache[region] = &cachedResult{data: result, expiresAt: time.Now().Add(s.rfCacheTTL)} + s.topoCache[cacheKey] = &cachedResult{data: result, expiresAt: time.Now().Add(s.rfCacheTTL)} s.cacheMu.Unlock() return result } -func (s *PacketStore) computeAnalyticsTopology(region string) map[string]interface{} { +func (s *PacketStore) computeAnalyticsTopology(region, area string) map[string]interface{} { s.mu.RLock() defer s.mu.RUnlock() @@ -4797,6 +4907,10 @@ func (s *PacketStore) computeAnalyticsTopology(region string) map[string]interfa if region != "" { regionObs = s.resolveRegionObservers(region) } + var areaNodes map[string]bool + if area != "" { + areaNodes = s.resolveAreaNodes(area) + } allNodes, pm := s.getCachedNodesAndPM() _ = allNodes // only pm is needed for topology @@ -4836,7 +4950,6 @@ func (s *PacketStore) computeAnalyticsTopology(region 
string) map[string]interfa continue } } - n := len(hops) hopCounts[n]++ allHopsList = append(allHopsList, n) @@ -4844,10 +4957,25 @@ func (s *PacketStore) computeAnalyticsTopology(region string) map[string]interfa hopSnr[n] = append(hopSnr[n], *tx.SNR) } for _, h := range hops { + // Area filter: only count hops belonging to nodes in the area. + if areaNodes != nil { + r := resolveHop(h) + if r == nil || !areaNodes[r.PublicKey] { + continue + } + } hopFreq[h]++ } for i := 0; i < len(hops)-1; i++ { a, b := hops[i], hops[i+1] + // Area filter: only count pairs where both nodes are in the area. + if areaNodes != nil { + rA := resolveHop(a) + rB := resolveHop(b) + if rA == nil || !areaNodes[rA.PublicKey] || rB == nil || !areaNodes[rB.PublicKey] { + continue + } + } if a > b { a, b = b, a } @@ -4862,6 +4990,12 @@ func (s *PacketStore) computeAnalyticsTopology(region string) map[string]interfa perObserver[obsID] = map[string]*struct{ minDist, maxDist, count int }{} } for i, h := range hops { + if areaNodes != nil { + r := resolveHop(h) + if r == nil || !areaNodes[r.PublicKey] { + continue + } + } dist := n - i entry := perObserver[obsID][h] if entry == nil { @@ -5141,9 +5275,10 @@ func haversineKm(lat1, lon1, lat2, lon2 float64) float64 { return R * 2 * math.Atan2(math.Sqrt(a), math.Sqrt(1-a)) } -func (s *PacketStore) GetAnalyticsDistance(region string) map[string]interface{} { +func (s *PacketStore) GetAnalyticsDistance(region, area string) map[string]interface{} { + cacheKey := region + "|" + area s.cacheMu.Lock() - if cached, ok := s.distCache[region]; ok && time.Now().Before(cached.expiresAt) { + if cached, ok := s.distCache[cacheKey]; ok && time.Now().Before(cached.expiresAt) { s.cacheHits++ s.cacheMu.Unlock() return cached.data @@ -5151,16 +5286,16 @@ func (s *PacketStore) GetAnalyticsDistance(region string) map[string]interface{} s.cacheMisses++ s.cacheMu.Unlock() - result := s.computeAnalyticsDistance(region) + result := s.computeAnalyticsDistance(region, area) 
s.cacheMu.Lock() - s.distCache[region] = &cachedResult{data: result, expiresAt: time.Now().Add(s.rfCacheTTL)} + s.distCache[cacheKey] = &cachedResult{data: result, expiresAt: time.Now().Add(s.rfCacheTTL)} s.cacheMu.Unlock() return result } -func (s *PacketStore) computeAnalyticsDistance(region string) map[string]interface{} { +func (s *PacketStore) computeAnalyticsDistance(region, area string) map[string]interface{} { s.mu.RLock() defer s.mu.RUnlock() @@ -5168,6 +5303,10 @@ func (s *PacketStore) computeAnalyticsDistance(region string) map[string]interfa if region != "" { regionObs = s.resolveRegionObservers(region) } + var areaNodes map[string]bool + if area != "" { + areaNodes = s.resolveAreaNodes(area) + } // Build region match set using precomputed tx pointers var matchSet map[*StoreTx]bool @@ -5202,6 +5341,51 @@ func (s *PacketStore) computeAnalyticsDistance(region string) map[string]interfa } } + // Additionally filter matchSet by area nodes + if areaNodes != nil && matchSet != nil { + for tx := range matchSet { + d := tx.ParsedDecoded() + pk, _ := d["public_key"].(string) + if pk == "" { + pk, _ = d["pubKey"].(string) + } + if pk == "" || !areaNodes[pk] { + delete(matchSet, tx) + } + } + } else if areaNodes != nil { + // No region filter but area filter: build matchSet from area nodes + matchSet = make(map[*StoreTx]bool) + for i := range s.distHops { + tx := s.distHops[i].tx + if matchSet[tx] { + continue + } + d := tx.ParsedDecoded() + pk, _ := d["public_key"].(string) + if pk == "" { + pk, _ = d["pubKey"].(string) + } + if pk != "" && areaNodes[pk] { + matchSet[tx] = true + } + } + for i := range s.distPaths { + tx := s.distPaths[i].tx + if matchSet[tx] { + continue + } + d := tx.ParsedDecoded() + pk, _ := d["public_key"].(string) + if pk == "" { + pk, _ = d["pubKey"].(string) + } + if pk != "" && areaNodes[pk] { + matchSet[tx] = true + } + } + } + // Filter precomputed hop records (copy to avoid mutating precomputed data during sort) filteredHops := 
make([]distHopRecord, 0, len(s.distHops)) for i := range s.distHops { @@ -5391,9 +5575,10 @@ func (s *PacketStore) computeAnalyticsDistance(region string) map[string]interfa // --- Hash Sizes Analytics --- -func (s *PacketStore) GetAnalyticsHashSizes(region string) map[string]interface{} { +func (s *PacketStore) GetAnalyticsHashSizes(region, area string) map[string]interface{} { + cacheKey := region + "|" + area s.cacheMu.Lock() - if cached, ok := s.hashCache[region]; ok && time.Now().Before(cached.expiresAt) { + if cached, ok := s.hashCache[cacheKey]; ok && time.Now().Before(cached.expiresAt) { s.cacheHits++ s.cacheMu.Unlock() return cached.data @@ -5401,10 +5586,10 @@ func (s *PacketStore) GetAnalyticsHashSizes(region string) map[string]interface{ s.cacheMisses++ s.cacheMu.Unlock() - result := s.computeAnalyticsHashSizes(region) + result := s.computeAnalyticsHashSizes(region, area) // Add multi-byte capability data (only for unfiltered/global view) - if region == "" { + if region == "" && area == "" { // Pass adopter hash sizes so capability can cross-reference adopterHS := make(map[string]int) if mbNodes, ok := result["multiByteNodes"].([]map[string]interface{}); ok { @@ -5420,13 +5605,13 @@ func (s *PacketStore) GetAnalyticsHashSizes(region string) map[string]interface{ } s.cacheMu.Lock() - s.hashCache[region] = &cachedResult{data: result, expiresAt: time.Now().Add(s.rfCacheTTL)} + s.hashCache[cacheKey] = &cachedResult{data: result, expiresAt: time.Now().Add(s.rfCacheTTL)} s.cacheMu.Unlock() return result } -func (s *PacketStore) computeAnalyticsHashSizes(region string) map[string]interface{} { +func (s *PacketStore) computeAnalyticsHashSizes(region, area string) map[string]interface{} { s.mu.RLock() defer s.mu.RUnlock() @@ -5434,6 +5619,10 @@ func (s *PacketStore) computeAnalyticsHashSizes(region string) map[string]interf if region != "" { regionObs = s.resolveRegionObservers(region) } + var areaNodes map[string]bool + if area != "" { + areaNodes = 
s.resolveAreaNodes(area) + } allNodes, pm := s.getCachedNodesAndPM() @@ -5465,6 +5654,16 @@ func (s *PacketStore) computeAnalyticsHashSizes(region string) map[string]interf continue } } + if areaNodes != nil { + d := tx.ParsedDecoded() + pk, _ := d["public_key"].(string) + if pk == "" { + pk, _ = d["pubKey"].(string) + } + if pk == "" || !areaNodes[pk] { + continue + } + } // Parse header and path byte if len(tx.RawHex) < 4 { @@ -5666,9 +5865,10 @@ type hashSizeNodeInfo struct { // GetAnalyticsHashCollisions returns pre-computed hash collision analysis. // This moves the O(n²) distance computation from the frontend to the server. -func (s *PacketStore) GetAnalyticsHashCollisions(region string) map[string]interface{} { +func (s *PacketStore) GetAnalyticsHashCollisions(region, area string) map[string]interface{} { + cacheKey := region + "|" + area s.cacheMu.Lock() - if cached, ok := s.collisionCache[region]; ok && time.Now().Before(cached.expiresAt) { + if cached, ok := s.collisionCache[cacheKey]; ok && time.Now().Before(cached.expiresAt) { s.cacheHits++ s.cacheMu.Unlock() return cached.data @@ -5676,10 +5876,10 @@ func (s *PacketStore) GetAnalyticsHashCollisions(region string) map[string]inter s.cacheMisses++ s.cacheMu.Unlock() - result := s.computeHashCollisions(region) + result := s.computeHashCollisions(region, area) s.cacheMu.Lock() - s.collisionCache[region] = &cachedResult{data: result, expiresAt: time.Now().Add(s.collisionCacheTTL)} + s.collisionCache[cacheKey] = &cachedResult{data: result, expiresAt: time.Now().Add(s.collisionCacheTTL)} s.cacheMu.Unlock() return result @@ -5721,7 +5921,7 @@ type twoByteCellInfo struct { CollisionCount int `json:"collision_count"` } -func (s *PacketStore) computeHashCollisions(region string) map[string]interface{} { +func (s *PacketStore) computeHashCollisions(region, area string) map[string]interface{} { // Get all nodes from DB nodes := s.getAllNodes() hashInfo := s.GetNodeHashSizeInfo() @@ -5775,6 +5975,20 @@ func (s 
*PacketStore) computeHashCollisions(region string) map[string]interface{ } } + // If area is specified, filter to only nodes in the area + if area != "" { + areaNodes := s.resolveAreaNodes(area) + if areaNodes != nil { + filtered := make([]nodeInfo, 0, len(nodes)) + for _, n := range nodes { + if areaNodes[n.PublicKey] { + filtered = append(filtered, n) + } + } + nodes = filtered + } + } + // Build collision nodes with hash info var allCNodes []collisionNode for _, n := range nodes { @@ -6323,7 +6537,7 @@ func (s *PacketStore) computeMultiByteCapability(adopterHashSizes map[string]int // --- Bulk Health (in-memory) --- -func (s *PacketStore) GetBulkHealth(limit int, region string) []map[string]interface{} { +func (s *PacketStore) GetBulkHealth(limit int, region, area string) []map[string]interface{} { s.mu.RLock() defer s.mu.RUnlock() @@ -6354,10 +6568,16 @@ func (s *PacketStore) GetBulkHealth(limit int, region string) []map[string]inter } } - // Get nodes from DB + // Area filtering — resolveAreaNodes requires the lock to already be held + var areaNodes map[string]bool + if area != "" { + areaNodes = s.resolveAreaNodes(area) + } + + // Get nodes from DB — fetch more when filtering so we don't under-fill after exclusions queryLimit := limit - if regionNodeKeys != nil { - queryLimit = 500 + if regionNodeKeys != nil || areaNodes != nil { + queryLimit = 10000 } rows, err := s.db.conn.Query("SELECT public_key, name, role, lat, lon FROM nodes ORDER BY last_seen DESC LIMIT ?", queryLimit) if err != nil { @@ -6378,15 +6598,19 @@ func (s *PacketStore) GetBulkHealth(limit int, region string) []map[string]inter if regionNodeKeys != nil && !regionNodeKeys[pk] { continue } + if areaNodes != nil && !areaNodes[pk] { + continue + } nodes = append(nodes, dbNode{ pk: pk, name: nullStrVal(name), role: nullStrVal(role), lat: nullFloat(lat), lon: nullFloat(lon), }) - if regionNodeKeys == nil && len(nodes) >= limit { + if regionNodeKeys == nil && areaNodes == nil && len(nodes) >= limit 
{ break } } - if regionNodeKeys != nil && len(nodes) > limit { + // Only cap to limit in the global (no-filter) case; area/region returns full filtered set + if regionNodeKeys != nil && areaNodes == nil && len(nodes) > limit { nodes = nodes[:limit] } diff --git a/config.example.json b/config.example.json index 5672ed31..b9fd0a42 100644 --- a/config.example.json +++ b/config.example.json @@ -166,6 +166,25 @@ "bufferKm": 20, "_comment": "Optional. Restricts ingestion and API responses to nodes within the polygon + bufferKm. Polygon is an array of [lat, lon] pairs (minimum 3). Use tools/geofilter-builder.html to draw a polygon visually. Remove this section to disable filtering. Nodes with no GPS fix are always allowed through." }, + "areas": { + "_comment": "Optional. GPS-based display filter. Each entry defines a geographic area by polygon ([lat, lon] pairs) or bounding box (latMin/latMax/lonMin/lonMax). Packets and nodes are attributed to an area based on the transmitting node's own GPS coordinates — not the observer's location. Areas appear as a filter pill bar in the dashboard. Remove this section to disable the area filter UI.", + "BAY": { + "label": "Bay Area", + "polygon": [ + [37.90, -122.55], + [37.90, -121.75], + [37.25, -121.75], + [37.25, -122.55] + ] + }, + "SJC": { + "label": "San Jose", + "latMin": 37.20, + "latMax": 37.45, + "lonMin": -122.05, + "lonMax": -121.75 + } + }, "regions": { "SJC": "San Jose, US", "SFO": "San Francisco, US", diff --git a/docs/api-spec.md b/docs/api-spec.md index 082b8861..d9f71307 100644 --- a/docs/api-spec.md +++ b/docs/api-spec.md @@ -3,8 +3,8 @@ > **Authoritative contract.** Both the Node.js and Go backends MUST conform to this spec. > The frontend relies on these exact shapes. Breaking changes require a spec update first. 
-**Version:** 1.0.0 -**Last updated:** 2025-07-17 +**Version:** 1.1.0 +**Last updated:** 2026-04-22 --- @@ -44,12 +44,17 @@ - [GET /api/traces/:hash](#get-apitraceshash) - [GET /api/config/theme](#get-apiconfigtheme) - [GET /api/config/regions](#get-apiconfigregions) +- [GET /api/config/areas](#get-apiconfigareas) +- [GET /api/config/areas/polygons](#get-apiconfigareaspolygons) - [GET /api/config/client](#get-apiconfigclient) - [GET /api/config/cache](#get-apiconfigcache) - [GET /api/config/map](#get-apiconfigmap) - [GET /api/iata-coords](#get-apiiata-coords) +- [GET /api/nodes/clock-skew](#get-apinodesclock-skew) +- [GET /api/analytics/hash-collisions](#get-apianalyticshash-collisions) - [GET /api/audio-lab/buckets](#get-apiaudio-labbuckets) - [WebSocket Messages](#websocket-messages) +- [Area Filter](#area-filter) --- @@ -286,6 +291,7 @@ Paginated node list with filtering. | `offset` | number | `0` | Pagination offset | | `role` | string | — | Filter by role: `repeater`, `room`, `companion`, `sensor` | | `region` | string | — | Comma-separated IATA codes for regional filtering | +| `area` | string | — | Area key from `config.json` — filters to nodes whose GPS falls inside the area polygon (see [Area Filter](#area-filter)) | | `lastHeard`| string | — | Recency filter: `1h`, `6h`, `24h`, `7d`, `30d` | | `sortBy` | string | `lastSeen` | Sort key: `name`, `lastSeen`, `packetCount` | | `search` | string | — | Substring match on `name` | @@ -1076,6 +1082,7 @@ RF signal analytics. | Param | Type | Default | Description | |----------|--------|---------|-------------------------------------| | `region` | string | — | Comma-separated IATA codes | +| `area` | string | — | Area key — restricts to packets whose transmitter GPS falls in the area (ADVERT packets only; see [Area Filter](#area-filter)) | ### Response `200` @@ -1146,6 +1153,7 @@ Network topology analytics. 
| Param | Type | Default | Description | |----------|--------|---------|-------------------------------------| | `region` | string | — | Comma-separated IATA codes | +| `area` | string | — | Area key — only hops that resolve to nodes inside the area are counted in repeater/pair frequency tables | ### Response `200` @@ -1241,6 +1249,7 @@ Channel analytics. | Param | Type | Default | Description | |----------|--------|---------|-------------------------------------| | `region` | string | — | Comma-separated IATA codes | +| `area` | string | — | Area key — area filtering is supported but not exposed in the dashboard (channel stats are observer-based) | ### Response `200` @@ -1279,6 +1288,7 @@ Hop distance analytics. | Param | Type | Default | Description | |----------|--------|---------|-------------------------------------| | `region` | string | — | Comma-separated IATA codes | +| `area` | string | — | Area key — restricts distance calculations to paths where the transmitter GPS falls in the area | ### Response `200` @@ -1343,6 +1353,7 @@ Hash size analysis across the network. | Param | Type | Default | Description | |----------|--------|---------|-------------------------------------| | `region` | string | — | Comma-separated IATA codes | +| `area` | string | — | Area key — restricts to packets from nodes in the area | ### Response `200` @@ -1380,6 +1391,73 @@ Hash size analysis across the network. --- +## GET /api/analytics/hash-collisions + +Hash collision analysis — packets where the same hash was used by multiple different nodes (ambiguous routing). 
+ +### Query Parameters + +| Param | Type | Default | Description | +|----------|--------|---------|-------------------------------------| +| `region` | string | — | Comma-separated IATA codes | +| `area` | string | — | Area key — restricts to packets from nodes in the area | + +### Response `200` + +```jsonc +{ + "collisions": [ + { + "hash": string, // hop hex prefix that collides + "count": number, // number of distinct nodes sharing this prefix + "nodes": [ + { + "pubkey": string, + "name": string | null, + "count": number // observation count for this node + } + ] + } + ], + "totalCollisions": number, + "affectedPackets": number +} +``` + +--- + +## GET /api/nodes/clock-skew + +Fleet-wide clock skew data. Returns all nodes for which clock skew has been calculated from ADVERT timestamp pairs. + +### Query Parameters + +| Param | Type | Default | Description | +|--------|--------|---------|-----------------------------------------------------| +| `area` | string | — | Area key — restricts to nodes whose GPS falls in the area | + +### Response `200` + +Returns a JSON array (not wrapped in an object): + +```jsonc +[ + { + "pubkey": string, + "nodeName": string | null, + "nodeRole": string | null, + "skewMs": number | null, // current estimated clock offset (ms) + "driftPerDaySec": number | null, // drift rate (seconds/day) + "severity": string, // "good" | "warning" | "critical" + "samples": null // always null in fleet response (too large) + } +] +``` + +**Note:** This is a bare array, not `{ nodes: [...] }`. + +--- + ## GET /api/analytics/subpaths Subpath frequency analysis. @@ -1594,6 +1672,51 @@ Returns a flat key-value object. --- +## GET /api/config/areas + +Available area filters defined in `config.json` under `areas`. Used by the frontend to populate the area pill bar. Entries with an empty `label` (e.g. comment keys) are excluded. + +### Response `200` + +```jsonc +[ + { + "key": string, // area key as defined in config (e.g. 
"bayarea") + "label": string // display name (e.g. "Bay Area") + } +] +``` + +Returns `[]` when no areas are configured. + +**Note:** Polygon coordinates are **not** included. Use `/api/config/areas/polygons` for the full geometry. + +--- + +## GET /api/config/areas/polygons + +Full area definitions including polygon/bounding-box coordinates. Intended for map rendering tools (e.g. the area-map debug tool). + +### Response `200` + +```jsonc +[ + { + "key": string, + "label": string, + "polygon": [[number, number]] | undefined, // [lat, lon] pairs (if polygon-style) + "latMin": number | undefined, // bounding-box style + "latMax": number | undefined, + "lonMin": number | undefined, + "lonMax": number | undefined + } +] +``` + +Returns `[]` when no areas are configured. + +--- + ## GET /api/config/client Client-side configuration values. @@ -1908,3 +2031,61 @@ A single observation of a transmission by an observer: | 1 | `FLOOD` | Flood/broadcast | | 2 | (reserved) | | | 3 | `TRANSPORT` | Transport (with transport codes) | + +--- + +## Area Filter + +The `?area=` query parameter is a **display-side geographic filter** that attributes data to a region based on the **transmitting node's own GPS coordinates**, as broadcast in its ADVERT packets. It is distinct from the observer-based `?region=` filter. + +### Configuration + +Areas are defined in `config.json` under the `areas` key: + +```jsonc +{ + "areas": { + "bayarea": { + "label": "Bay Area", + "polygon": [[37.9, -122.5], [37.9, -121.9], [37.3, -121.9], [37.3, -122.5]] + }, + "sanjose": { + "label": "San Jose", + "latMin": 37.25, "latMax": 37.45, + "lonMin": -122.05, "lonMax": -121.75 + } + } +} +``` + +Each entry may use either a `polygon` (array of `[lat, lon]` pairs, minimum 3 points) or a bounding box (`latMin`/`latMax`/`lonMin`/`lonMax`). The polygon check uses standard ray-casting point-in-polygon. + +### Attribution rules + +| Packet type | Area-attributable? 
| Reason | +|-------------|-------------------|--------| +| ADVERT (4) | Yes | Carries `public_key` + transmitter GPS in payload | +| GRP_TXT (5), TXT_MSG (2), REQ (0), others | No | Sender is encrypted; origin cannot be determined | + +When `?area=` is active, **only ADVERT packets** (and nodes derived from them) are included in filtered results. All other packet types are excluded. This is by design — non-ADVERT packets have encrypted senders and cannot be attributed to a geographic origin. + +### GPS staleness + +Node GPS coordinates are read from the `nodes` table, which is updated on ADVERT ingest. A node that moves between areas will not be re-attributed until its next ADVERT (typically 12–24 hours for repeaters). The area node set is cached for 30 seconds server-side. + +### Endpoints supporting `?area=` + +| Endpoint | Area support | +|----------|-------------| +| `GET /api/nodes` | Filters node list by GPS in area | +| `GET /api/analytics/rf` | Restricts RF stats to ADVERT packets from area nodes | +| `GET /api/analytics/topology` | Counts only hops that resolve to nodes in the area | +| `GET /api/analytics/channels` | Supported (not used by dashboard UI) | +| `GET /api/analytics/distance` | Restricts distance paths to area-node transmitters | +| `GET /api/analytics/hash-sizes` | Restricts hash analysis to area-node packets | +| `GET /api/analytics/hash-collisions` | Restricts collision analysis to area-node packets | +| `GET /api/nodes/clock-skew` | Restricts fleet clock skew list to nodes in area | + +### Cross-antimeridian polygons + +Polygons that span the 180° meridian (antimeridian) are **not supported** — ray-casting point-in-polygon breaks at the date line. Split such areas into two separate entries. 
diff --git a/docs/user-guide/analytics.md b/docs/user-guide/analytics.md index eb2f99b8..020962f1 100644 --- a/docs/user-guide/analytics.md +++ b/docs/user-guide/analytics.md @@ -83,6 +83,10 @@ Test hash prefix lengths to see how many collisions different sizes would produc All analytics tabs respect the **region filter** at the top. Select a region to scope the data to observers in that area. +## Area filter + +If [areas are configured](area-filter.md), an area pill bar also appears. Selecting an area scopes all analytics to nodes whose GPS position falls within that area. This is based on the transmitting node's own coordinates — not the observer's location — so it avoids cross-region pollution from distant observers. + ## Deep linking Each tab is deep-linkable. Share a URL like `#/analytics?tab=collisions` to point someone directly at hash issues. diff --git a/docs/user-guide/area-filter.md b/docs/user-guide/area-filter.md new file mode 100644 index 00000000..e22d57f0 --- /dev/null +++ b/docs/user-guide/area-filter.md @@ -0,0 +1,110 @@ +# Area Filter + +The area filter is a **GPS-based display filter** that scopes the dashboard to nodes within a defined geographic area. It is distinct from the [region filter](configuration.md#regions), which groups data by the observer's IATA location code. + +## How it differs from the region filter + +| | Region filter | Area filter | +|--|--|--| +| Based on | Observer's IATA code (from MQTT topic) | Transmitting node's own GPS coordinates | +| Set by | MQTT topic structure | Node's advertised GPS position | +| Use case | Separate traffic by observer location | Separate traffic by where nodes physically are | + +Because the region filter is observer-based, a node broadcasting in San Jose can appear under "San Francisco" if a San Francisco observer hears it first. The area filter avoids this cross-region pollution by attributing packets to areas based on where the **sending node** is located. 
+ +## Configuration + +Add an `areas` block to `config.json`: + +```json +"areas": { + "BAY": { + "label": "Bay Area", + "polygon": [ + [37.90, -122.55], + [37.90, -121.75], + [37.25, -121.75], + [37.25, -122.55] + ] + }, + "SJC": { + "label": "San Jose", + "latMin": 37.20, + "latMax": 37.45, + "lonMin": -122.05, + "lonMax": -121.75 + } +} +``` + +Each entry defines one area. Two shape formats are supported: + +| Format | Fields | Notes | +|--------|--------|-------| +| Polygon | `polygon: [[lat, lon], ...]` | At least 3 points. Supports irregular shapes. | +| Bounding box | `latMin`, `latMax`, `lonMin`, `lonMax` | Simpler rectangles. | + +The `label` field controls what appears in the filter pill bar in the UI. + +Remove the `areas` block to disable the area filter entirely — the pill bar disappears automatically. + +### Nodes without GPS + +Nodes with no GPS fix (`lat=0, lon=0` or missing coordinates) cannot be attributed to any area and are therefore excluded whenever an area filter is active; they reappear once the node advertises a valid position. (This differs from the ingestion geofence, which always lets GPS-less nodes through.) + +## Using the area filter + +When `areas` is configured, a pill bar appears below the main navigation on: + +- **Packets** — shows only packets where the transmitting node is within the selected area +- **Nodes** — shows only nodes whose GPS position falls within the area +- **Analytics** — all charts and tables are scoped to nodes in the area +- **Channels** — channel message list is scoped to the area + +Click a pill to select that area. Click again (or click the active pill) to deselect. Only one area can be active at a time. The selection is saved in `localStorage` and persists across page reloads. + +## Area Map tool + +The Area Map is a visual debug and builder tool served at `/area-map.html` on your CoreScope instance. + +### Viewing existing areas + +1. Open `/area-map.html` in your browser. +2. Leave the server field empty (uses the current origin) and click **Load**. +3. 
Each configured area is drawn as a colored polygon on the map. +4. Colored dots show nodes that the server returns when that area is selected — this is what the filter actually returns, so you can verify the boundaries are correct. +5. Use the checkboxes in the sidebar to toggle individual areas on or off. +6. Enable **All nodes (grey)** to overlay every node with GPS — nodes outside all areas appear grey, making it easy to spot incorrectly excluded or included nodes. + +### Drawing a new area + +1. Fill in **Key** (e.g. `ANT`) and **Label** (e.g. `Antwerp`) in the sidebar. +2. Click **Draw** — the cursor turns to a crosshair. +3. Click on the map to add polygon vertices. The polygon updates after each click. +4. Use **↩ Undo** to remove the last point, **✕ Clear** to start over. +5. When satisfied, the JSON snippet in the output box is ready to copy: + +```json +"ANT": { + "label": "Antwerp", + "polygon": [[51.28, 4.20], [51.28, 4.55], [51.10, 4.55], [51.10, 4.20]] +} +``` + +6. Paste this entry into the `areas` object in `config.json` and restart the server. + +## API + +``` +GET /api/config/areas +``` + +Returns the list of configured area keys and labels (no polygon data). Used by the frontend to build the pill bar. + +``` +GET /api/config/areas/polygons +``` + +Returns full area definitions including polygon coordinates. Used by the Area Map tool. + +Both endpoints require no authentication. diff --git a/docs/user-guide/configuration.md b/docs/user-guide/configuration.md index eda7910d..ae9761bd 100644 --- a/docs/user-guide/configuration.md +++ b/docs/user-guide/configuration.md @@ -189,6 +189,27 @@ Restricts ingestion and API responses to nodes within the polygon plus a buffer See [Geographic Filtering](geofilter.md) for the full guide including the visual polygon builder and the prune script for cleaning up historical data. 
+## Areas + +```json +"areas": { + "BAY": { + "label": "Bay Area", + "polygon": [[37.90, -122.55], [37.90, -121.75], [37.25, -121.75], [37.25, -122.55]] + }, + "SJC": { + "label": "San Jose", + "latMin": 37.20, "latMax": 37.45, "lonMin": -122.05, "lonMax": -121.75 + } +} +``` + +GPS-based display filter. When configured, a pill bar appears in the dashboard letting users scope packets, nodes, and analytics to nodes physically located within a named area. Attribution is based on the transmitting node's own GPS coordinates — not the observer's location. + +Each entry supports a `polygon` (array of `[lat, lon]` pairs) or a bounding box (`latMin`/`latMax`/`lonMin`/`lonMax`). Remove the block to disable the area filter UI. + +See [Area Filter](area-filter.md) for the full guide including the visual builder tool. + ## Home page The `home` section customizes the onboarding experience. See `config.example.json` for the full structure including `steps`, `checklist`, and `footerLinks`. diff --git a/docs/user-guide/nodes.md b/docs/user-guide/nodes.md index f0697c9a..c995d522 100644 --- a/docs/user-guide/nodes.md +++ b/docs/user-guide/nodes.md @@ -38,6 +38,10 @@ Type in the search box to filter by name or public key. The filter applies insta Filter to show only active, degraded, or silent nodes. +### Area filter + +If [areas are configured](area-filter.md), an area pill bar appears above the list. Selecting an area shows only nodes whose GPS position falls within that area. + ### Last heard filter Filter nodes by how recently they were heard (e.g., last hour, last 24h). diff --git a/docs/user-guide/packets.md b/docs/user-guide/packets.md index 2763f027..6221965c 100644 --- a/docs/user-guide/packets.md +++ b/docs/user-guide/packets.md @@ -32,6 +32,10 @@ Select a specific observer to see only packets it captured. Saved across session Filter by packet type (e.g., show only Adverts or Channel Messages). 
+### Area filter + +If [areas are configured](area-filter.md), an area pill bar appears above the packet list. Selecting an area shows only packets where the transmitting node's GPS position falls within that area. + ### Time window Choose how far back to look: 15 minutes, 1 hour, 6 hours, 24 hours, etc. On mobile, the window is capped at 3 hours for performance. diff --git a/public/analytics.js b/public/analytics.js index 36fe90c6..5dc638e7 100644 --- a/public/analytics.js +++ b/public/analytics.js @@ -75,6 +75,7 @@

📊 Mesh Analytics

Deep dive into your mesh network data

+
@@ -96,6 +97,14 @@
`; + // Tabs where the area filter is meaningful (transmitter GPS attribution) + const AREA_FILTER_TABS = new Set(['overview', 'rf', 'topology', 'hashsizes', 'collisions', 'nodes', 'clock-health']); + + function setAreaFilterVisibility(tab) { + const el = document.getElementById('analyticsAreaFilter'); + if (el) el.style.display = AREA_FILTER_TABS.has(tab) ? '' : 'none'; + } + // Tab handling const analyticsTabs = document.getElementById('analyticsTabs'); initTabBar(analyticsTabs); @@ -105,6 +114,7 @@ document.querySelectorAll('.tab-btn').forEach(b => b.classList.remove('active')); btn.classList.add('active'); _currentTab = btn.dataset.tab; + setAreaFilterVisibility(_currentTab); renderTab(_currentTab); }); @@ -121,7 +131,10 @@ } RegionFilter.init(document.getElementById('analyticsRegionFilter')); + AreaFilter.init(document.getElementById('analyticsAreaFilter')); + setAreaFilterVisibility(_currentTab); RegionFilter.onChange(function () { loadAnalytics(); }); + AreaFilter.onChange(function () { loadAnalytics(); }); // Delegated click/keyboard handler for clickable table rows const analyticsContent = document.getElementById('analyticsContent'); @@ -151,12 +164,14 @@ try { _analyticsData = {}; const rqs = RegionFilter.regionQueryString(); - const sep = rqs ? '?' + rqs.slice(1) : ''; + const aqs = AreaFilter.areaQueryString(); + const sep = (rqs + aqs) ? '?' + (rqs + aqs).slice(1) : ''; + const sepNoArea = rqs ? '?' 
+ rqs.slice(1) : ''; const [hashData, rfData, topoData, chanData, collisionData] = await Promise.all([ api('/analytics/hash-sizes' + sep, { ttl: CLIENT_TTL.analyticsRF }), api('/analytics/rf' + sep, { ttl: CLIENT_TTL.analyticsRF }), api('/analytics/topology' + sep, { ttl: CLIENT_TTL.analyticsRF }), - api('/analytics/channels' + sep, { ttl: CLIENT_TTL.analyticsRF }), + api('/analytics/channels' + sepNoArea, { ttl: CLIENT_TTL.analyticsRF }), api('/analytics/hash-collisions' + sep, { ttl: CLIENT_TTL.analyticsRF }), ]); _analyticsData = { hashData, rfData, topoData, chanData, collisionData }; @@ -1777,7 +1792,7 @@ async function renderNodesTab(el) { el.innerHTML = '
Loading node analytics…
'; try { - const rq = RegionFilter.regionQueryString(); + const rq = RegionFilter.regionQueryString() + AreaFilter.areaQueryString(); const [nodesResp, bulkHealth] = await Promise.all([ api('/nodes?limit=10000&sortBy=lastSeen' + rq, { ttl: CLIENT_TTL.nodeList }), api('/nodes/bulk-health?limit=50' + rq, { ttl: CLIENT_TTL.analyticsRF }) @@ -2509,7 +2524,7 @@ function destroy() { _analyticsData = {}; _channelData = null; if (_ngState && _ async function renderPrefixTool(el) { el.innerHTML = '
Loading prefix data…
'; - const rq = RegionFilter.regionQueryString(); + const rq = RegionFilter.regionQueryString() + AreaFilter.areaQueryString(); const regionLabel = rq ? (new URLSearchParams(rq.slice(1)).get('region') || '') : ''; let nodesResp; @@ -3464,7 +3479,8 @@ function destroy() { _analyticsData = {}; _channelData = null; if (_ngState && _ async function renderClockHealthTab(el) { el.innerHTML = '
Loading clock health data…
'; try { - var data = await (await fetch('/api/nodes/clock-skew')).json(); + const aqs = AreaFilter.areaQueryString(); + var data = await (await fetch('/api/nodes/clock-skew' + (aqs ? '?' + aqs.slice(1) : ''))).json(); if (!Array.isArray(data) || !data.length) { el.innerHTML = '
No clock skew data available. Nodes need recent adverts for clock analysis.
'; return; diff --git a/public/area-filter.js b/public/area-filter.js new file mode 100644 index 00000000..34e08002 --- /dev/null +++ b/public/area-filter.js @@ -0,0 +1,129 @@ +/* === CoreScope — area-filter.js (single-select area filter component) === */ +'use strict'; + +(function () { + var LS_KEY = 'meshcore-area-filter'; + var _areas = []; // [{key, label}, ...] + var _selected = null; // selected area key string, or null = all + var _listeners = []; + var _container = null; + var _loaded = false; + + function loadFromStorage() { + try { + var v = localStorage.getItem(LS_KEY); + if (v) return v; + } catch (e) {} + return null; + } + + function saveToStorage() { + if (!_selected) { + localStorage.removeItem(LS_KEY); + } else { + localStorage.setItem(LS_KEY, _selected); + } + } + + _selected = loadFromStorage(); + + async function fetchAreas() { + if (_loaded) return _areas; + try { + var data = await fetch('/api/config/areas').then(function (r) { return r.json(); }); + _areas = Array.isArray(data) ? data : []; + _loaded = true; + if (_selected && !_areas.some(function (a) { return a.key === _selected; })) { + _selected = null; + saveToStorage(); + } + } catch (e) { + _areas = []; + } + return _areas; + } + + function getSelected() { return _selected; } + function getAreaParam() { return _selected || ''; } + function areaQueryString() { return _selected ? '&area=' + encodeURIComponent(_selected) : ''; } + + function triggerLabel() { + if (!_selected) return 'Area: All ▾'; + var area = _areas.find(function (a) { return a.key === _selected; }); + return 'Area: ' + (area ? area.label : _selected) + ' ▾'; + } + + function render(container) { + if (_areas.length === 0) { + container.innerHTML = ''; + container.style.display = 'none'; + return; + } + container.style.display = ''; + + if (container._areaCleanup) { container._areaCleanup(); container._areaCleanup = null; } + + var html = '
'; + html += ''; + html += '
'; + container.innerHTML = html; + + var trigger = container.querySelector('.region-dropdown-trigger'); + var menu = container.querySelector('.area-dropdown-menu'); + + trigger.onclick = function (e) { + e.stopPropagation(); + var open = !menu.hidden; + menu.hidden = open; + trigger.setAttribute('aria-expanded', String(!open)); + }; + + menu.onclick = function (e) { + var btn = e.target.closest('[data-area]'); + if (!btn) return; + _selected = (btn.dataset.area === '__all__') ? null : btn.dataset.area; + saveToStorage(); + render(container); + _listeners.forEach(function (fn) { fn(_selected); }); + }; + + function onDocClick(e) { + if (!container.contains(e.target)) { + menu.hidden = true; + trigger.setAttribute('aria-expanded', 'false'); + } + } + document.addEventListener('click', onDocClick, true); + container._areaCleanup = function () { + document.removeEventListener('click', onDocClick, true); + }; + } + + function onChange(fn) { _listeners.push(fn); return fn; } + function offChange(fn) { _listeners = _listeners.filter(function (f) { return f !== fn; }); } + + async function initFilter(container) { + _container = container; + await fetchAreas(); + render(container); + } + + window.AreaFilter = { + init: initFilter, + render: render, + getSelected: getSelected, + getAreaParam: getAreaParam, + areaQueryString: areaQueryString, + onChange: onChange, + offChange: offChange, + }; +})(); diff --git a/public/area-map.html b/public/area-map.html new file mode 100644 index 00000000..958748ea --- /dev/null +++ b/public/area-map.html @@ -0,0 +1,353 @@ + + + + + +Area Map + + + + + + +
+

Area Map

+
+ + + +
+ Click Load to fetch areas and nodes +
+ +
+ + +
+
+ + + + diff --git a/public/channels.js b/public/channels.js index aa14cd9c..218b61cf 100644 --- a/public/channels.js +++ b/public/channels.js @@ -446,7 +446,7 @@ // Fetch packets from API — get all payload_type=5 (GRP_TXT/CHAN) var rp = RegionFilter.getRegionParam(); - var qs = rp ? '®ion=' + encodeURIComponent(rp) : ''; + var qs = (rp ? '®ion=' + encodeURIComponent(rp) : ''); var data; try { data = await api('/packets?limit=1000&payloadType=5' + qs, { ttl: 10000 }); @@ -671,7 +671,6 @@ await refreshMessages({ regionSwitch: true, forceNoCache: true }); }); }); - // Channel key input handler (#725 M2, improved UX #759) var chKeyForm = document.getElementById('chKeyForm'); if (chKeyForm) { @@ -1258,7 +1257,7 @@ const requestHash = selectedHash; const rp = RegionFilter.getRegionParam() || ''; const request = beginMessageRequest(requestHash, rp); - const regionQs = rp ? '®ion=' + encodeURIComponent(rp) : ''; + const regionQs = (rp ? '®ion=' + encodeURIComponent(rp) : ''); const data = await api(`/channels/${encodeURIComponent(requestHash)}/messages?limit=200${regionQs}`, { ttl: CLIENT_TTL.channelMessages, bust: !!opts.forceNoCache }); if (isStaleMessageRequest(request)) return; const newMsgs = data.messages || []; diff --git a/public/index.html b/public/index.html index 1187e0ce..967076ca 100644 --- a/public/index.html +++ b/public/index.html @@ -88,6 +88,7 @@ + diff --git a/public/live.css b/public/live.css index b1664105..64dce27c 100644 --- a/public/live.css +++ b/public/live.css @@ -319,8 +319,8 @@ .feed-show-btn { display: none !important; } .live-legend { display: none !important; } .legend-toggle-btn { display: none !important; } - .live-header { - flex-wrap: wrap; gap: 6px; padding: 6px 10px; + .live-header { + gap: 6px; padding: 6px 10px; top: 56px; left: 8px; right: 8px; max-width: calc(100vw - 16px); } .live-stats-row { flex-wrap: wrap; gap: 4px; } diff --git a/public/live.js b/public/live.js index c9391cf8..ed372997 100644 --- a/public/live.js +++ 
b/public/live.js @@ -835,6 +835,7 @@ Show only favorited and claimed nodes +
+
@@ -818,11 +821,13 @@ // Init shared RegionFilter component RegionFilter.init(document.getElementById('packetsRegionFilter'), { dropdown: true }); + AreaFilter.init(document.getElementById('packetsAreaFilter')); if (_pendingUrlRegion) { RegionFilter.setSelected(_pendingUrlRegion.split(',').filter(Boolean)); _pendingUrlRegion = null; } RegionFilter.onChange(function() { updatePacketsUrl(); loadPackets(); }); + AreaFilter.onChange(function() { updatePacketsUrl(); loadPackets(); }); // --- Packet Filter Language --- (function() { diff --git a/public/style.css b/public/style.css index db9c5d72..84623922 100644 --- a/public/style.css +++ b/public/style.css @@ -1714,22 +1714,25 @@ tr[data-hops]:hover { background: rgba(59,130,246,0.1); } .perf-table th, .perf-table td { padding: 4px 6px; } } -/* ─── Region filter bar ─── */ +/* ─── Region / Area filter bars ─── */ .region-filter-bar { display: flex; flex-wrap: wrap; gap: 6px; padding: 8px 0; } +.area-dropdown-menu { min-width: 160px; } +.area-dropdown-item.area-item-active { color: var(--accent); font-weight: 600; } .region-filter-container { margin: 0; padding: 0; display: inline-flex; align-items: center; } .region-pill { display: inline-flex; align-items: center; padding: 4px 12px; border-radius: 16px; font-size: 12px; font-weight: 500; cursor: pointer; border: 1.5px solid var(--border); - background: transparent; color: var(--text-muted); transition: all 0.15s; + background: transparent; color: var(--text-muted); transition: all 0.15s; white-space: nowrap; + flex-shrink: 0; } .region-pill:hover { border-color: var(--accent); color: var(--accent); } -.region-pill-active { +button.region-pill-active { background: var(--accent); color: #fff; border-color: var(--accent); } -.region-pill-active:hover { opacity: 0.85; } +button.region-pill-active:hover { opacity: 0.85; color: #fff; } .region-filter-label { font-size: 12px; font-weight: 600; color: var(--text-muted); align-self: center; - margin-right: 2px; user-select: 
none; + margin-right: 2px; user-select: none; white-space: nowrap; flex-shrink: 0; } .region-dropdown-wrap { position: relative; display: inline-flex; align-items: center; } .region-dropdown-trigger { diff --git a/tools/area-map.html b/tools/area-map.html new file mode 100644 index 00000000..233377fe --- /dev/null +++ b/tools/area-map.html @@ -0,0 +1,264 @@ + + + + + +Area Map Debug + + + + + + +
+

Area Map Debug

+
+ + + +
+ Enter server URL and click Load +
+ +
+ +
+
+ + + + diff --git a/tools/geofilter-builder.html b/tools/geofilter-builder.html index addb7e69..2f5ad7da 100644 --- a/tools/geofilter-builder.html +++ b/tools/geofilter-builder.html @@ -8,28 +8,28 @@ @@ -61,8 +61,8 @@

GeoFilter Builder