-
Notifications
You must be signed in to change notification settings - Fork 5
Expand file tree
/
Copy pathapp.js
More file actions
76 lines (63 loc) · 2.17 KB
/
app.js
File metadata and controls
76 lines (63 loc) · 2.17 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
// Network crawler CLI: walks the node network starting from an optional
// seed node and prints aggregate height/block/version statistics.
const Crawler = require('./src/crawler')
const { URL } = require('url')
const { orderBy } = require('lodash/collection')
const crawler = new Crawler()
// CLI arguments after the node binary and the script path; args[0], when
// present, is the seed node URL (parsed at the bottom of this file).
const args = process.argv.slice(2)
// Print a summary of the crawl results and terminate the process.
//
// Aggregates `crawler.nodes` into:
//   - per-height stats: node count plus a tally of block ids seen at that height
//   - per-version stats: node count per reported version
// Nodes missing `height` or `id` (i.e. never answered) are skipped from the
// stats but still counted in the online/offline totals.
//
// @param {Object} crawler - finished crawler; reads `nodes` (map of node
//   objects with optional `height`, `id`, `version`), `heights` (one entry
//   per online node) and `startTime` (Date).
const report = (crawler) => {
  const blockStats = {}
  const versionStats = {}
  // Comparator factory: descending order on `key`. The `<`-based comparison
  // matches lodash orderBy(..., [key], ['desc']) for numbers and strings,
  // removing the need for the lodash dependency here.
  const descBy = (key) => (a, b) => {
    if (a[key] === b[key]) return 0
    return a[key] < b[key] ? 1 : -1
  }
  for (const item of Object.values(crawler.nodes)) {
    // Nodes without a height/id never responded; exclude them from stats.
    if (item.height === undefined || item.id === undefined) {
      continue
    }
    const stat = blockStats[item.height]
    if (stat) {
      stat.count += 1
      // Initialise unseen block ids to 0 first: the previous `+= 1` on an
      // undefined entry produced NaN, silently corrupting the tally whenever
      // nodes at the same height disagreed on the block hash.
      stat.ids[item.id] = (stat.ids[item.id] || 0) + 1
    } else {
      blockStats[item.height] = {
        count: 1,
        height: item.height,
        ids: { [item.id]: 1 }
      }
    }
    if (versionStats[item.version]) {
      versionStats[item.version].count += 1
    } else {
      versionStats[item.version] = {
        count: 1,
        version: item.version
      }
    }
  }
  console.log('===========================================')
  console.log(`All nodes: ${Object.keys(crawler.nodes).length}`)
  console.log(`Nodes online: ${crawler.heights.length}`)
  console.log(`Nodes offline: ${Object.keys(crawler.nodes).length - crawler.heights.length}`)
  // height/block stats
  console.log('')
  console.log('Height and block stats:')
  for (const stat of Object.values(blockStats).sort(descBy('height'))) {
    console.log(`  ${stat.height} with ${stat.count} nodes. Block hashes:`)
    for (const hash in stat.ids) {
      console.log(`    - ${hash} (${stat.ids[hash]} nodes)`)
    }
  }
  // version stats
  console.log('')
  console.log('Version stats:')
  for (const stat of Object.values(versionStats).sort(descBy('version'))) {
    console.log(`  - ${stat.version} on ${stat.count} nodes`)
  }
  console.log('------------------------------------------')
  console.log(`Finished scanning in ${new Date() - crawler.startTime}ms`)
  process.exit(0)
}
// Seed node taken from the first CLI argument (a URL such as
// "http://1.2.3.4:4001"); both fields stay undefined when no argument
// is given.
const node = { ip: undefined, port: undefined }
if (args.length !== 0) {
  const { hostname, port } = new URL(args[0])
  node.ip = hostname
  node.port = port
}
// Kick off the crawl; print the report on success, log the error otherwise.
crawler.run(node)
  .then(report)
  .catch(err => console.error(err))