forked from alex-ferragni/alex-ferragni.github.io
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathsize.js
More file actions
159 lines (120 loc) · 5.5 KB
/
size.js
File metadata and controls
159 lines (120 loc) · 5.5 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
/* Core function that will execute the whole size measure from scratch.
 *
 * Searches for caches at every power-of-two size between 2^minPower and
 * 2^maxPower bytes (inclusive). For each candidate size it first calibrates
 * the number of memory accesses (so the fastest measure takes roughly 30
 * time units), then runs the actual detection. Detected sizes are collected
 * and logged to the console; graphs are produced inside detectCache.
 *
 * No parameters, no return value; side effects are console output and the
 * d3-driven plot area "#plots".
 */
function executeCacheSearch() {
	//parameters:
	var minPower = 10; //smallest candidate: 2^10 B = 1 KiB
	var maxPower = 25; //largest candidate: 2^25 B = 32 MiB
	/******* "Main" code, get parameters and launches execution *******/
	d3.select("#plots").html("");//clear previous plots, if any
	var measures = 2**7; //number of sizes sampled around each candidate
	var nIter = 150000; //initial guess, refined by calibrateNIter below
	var caches = [];
	//for every power of two, take measures around this power of two, see if we get an increase in slope
	for(var power = minPower; power <= maxPower; ++power){
		//NOTE: declared locally here — the original assigned an implicit global
		var cacheDetectionSize = 2**power;
		//first calibrate the number of accesses, such that the fastest measure takes roughly 30 cycles
		nIter = calibrateNIter(nIter, 30, function(nIter){
			return detectCache(nIter, cacheDetectionSize, 2**3, true)[0];
		});
		//then see if there is a cache here (detectCache returns [min, isCacheHere, junk])
		if (detectCache(nIter, cacheDetectionSize, measures, false)[1]){
			caches.push(2**power);
		}
	}
	console.log("Done computing. Searched for caches from "+(2**minPower)+"B to "+(2**maxPower)+"B inclusive.");
	console.log("Potential caches shown in the graphs.");
	console.log("Found "+caches.length+" caches:");
	console.log(caches);
}
/* This function tries to detect whether a cache has the given size.
 *
 * It measures pointer-chase access times for array sizes ranging from
 * cacheDetectionSize/2 to cacheDetectionSize*1.5 and looks for an increase
 * in slope around the candidate size (which indicates a cache boundary).
 *
 * nIter: number of memory accesses for a single measure
 * cacheDetectionSize: size (in bytes) which we are testing against
 * steps: number of different sizes that will be tested (more = more data to deal with)
 * warmup: whether this is a warmup or not (avoid some later computations if they are not needed)
 *
 * Returns [min, isCacheHere, junk]:
 *   min - the fastest measure taken (used by the calibration loop)
 *   isCacheHere - true if a cache boundary was detected (always false on warmup)
 *   junk - accumulator kept alive to defeat dead-code elimination
 */
function detectCache(nIter, cacheDetectionSize, steps, warmup){
	if(!warmup){
		console.log("checking whether "+cacheDetectionSize+"B is the size of a cache...");
	}
	var low = cacheDetectionSize / 2; //lowest size tested
	var high = cacheDetectionSize * 1.5; //highest size tested
	var step = (high-low)/steps; //distance between two sizes tested
	var junk = 0; //variable against DCE
	var max = 0; //additionally, we keep track of minimum and maximum values, for the graphs and/or the calibration
	//BUGFIX: was "2*20" (i.e. 40, almost certainly a typo for 2**20); measures
	//above 40 would never update min, corrupting the calibration feedback.
	//Infinity is the correct sentinel: the loop below always runs at least once,
	//so min is always replaced by a real measurement.
	var min = Infinity;
	var results = new Array((high - low)/step); //array of measures taken
	var mainArrAccessor = new Int32Array(new ArrayBuffer(high)); //main array that will be accessed. Represents the contiguous virtual memory, contains the address set
	var accessors = new Array(high/4); //temporary array to prepare the mainArrAccessor
	//prepare an accessor array containing indexes from 0 to numWays
	//then shuffle it, it will be used to generate the order of accesses in our main array
	for (var i=0; i<low/4; ++i){
		accessors[i]=i;
	}
	shuffle(accessors, low/4);
	//use the accessor array to put correct values in our main array
	//after that, we can start by accessing i=mainArrAccessors[0], then follow the addresses contained (i = mainArrAccessor[i])
	//the accesses will then be performed randomly and make a loop that accesses each address once
	//this is to avoid any optimization, the browser cannot guess the next address, and they are not contiguous, so it cannot parallelize accesses or prefetch them
	//we also use the content of the array in a variable we keep and print to prevent the accesses to be optimized away
	var idx = accessors[0];
	for(var i=0; i<low/4 - 1; ++i){
		mainArrAccessor[idx] = accessors[i+1];
		idx = accessors[i+1];
	}
	mainArrAccessor[idx] = accessors[0]; //don't forget the last address should point to the first one
	//in order to avoid re-shuffling the entire array every time we increase its size, we keep track of the current max valid address in the array.
	//every time we add another address, we "insert" it randomly in the current cycle, which gives another valid cycle, and still mostly looks random
	var previousMaxIdx = low/4-1;
	for (var size = low; size < high; size+=step){
		//"insert" every address that is greater than the biggest valid address in the cycle, and update the max
		for (var i=previousMaxIdx+1; i<size/4; ++i){
			var randomIdx = Math.floor(Math.random() * (i-1));
			mainArrAccessor[i] = mainArrAccessor[randomIdx];
			mainArrAccessor[randomIdx] = i;
		}
		previousMaxIdx = size/4-1;
		//this is where we take the measures
		idx = 0;
		//measure the current time (spin until the clock ticks over, so we start
		//exactly on a tick boundary). NOTE: declared with var here — the original
		//assigned lastTick/curTick/beginTick/endTick as implicit globals.
		var lastTick, curTick, beginTick, endTick;
		lastTick = curTick = performance.now();
		while (lastTick == (curTick = performance.now()));
		beginTick = curTick;
		//then perform many memory accesses
		for (var i=0; i<nIter; ++i){
			idx = mainArrAccessor[idx];
			junk += idx;
		}
		//and finally measure the current time again
		endTick = performance.now();
		//add this measure to our array
		results[(size-low)/step] = [size,endTick-beginTick];
		//then update the min and max
		if (max < endTick-beginTick){
			max = endTick-beginTick;
		}
		if (min > endTick-beginTick){
			min = endTick-beginTick;
		}
		if (!warmup){
			console.log("for size "+size+"B, time = "+(endTick-beginTick)+", junk: "+junk);
			console.log("(time per byte = "+((endTick-beginTick) / nIter)+")");
		}
	}
	//can we detect a cache here?
	var isCacheHere = false;
	if (!warmup){
		//first, compute the line under the curve (it will make the cache detection easier)
		var curve = lineUnderCurve(results); //was an implicit global in the original
		//then try to see if it looks like there is a cache here
		isCacheHere = detectCacheSize(curve);
		if (isCacheHere){
			console.log("Cache detected! Size = "+cacheDetectionSize+"B");
			displayResults(results, null/*curve*/, null);
		}
		else{
			console.log("No cache here...");
		}
	}
	return [min,isCacheHere,junk];
}