% citations.bib -- BibTeX bibliography database.
% (Removed non-BibTeX residue from a web-page scrape: UI text and line-number gutter.)
@misc{bluecost1,
  title        = {{Li-Fi} vs {Wi-Fi} vs {iBeacon} {BLE} Technology},
  howpublished = {Beaconstac blog, \url{https://blog.beaconstac.com/2016/05/li-fi-vs-wi-fi-vs-ibeacon-ble-technology/}},
  note         = {Accessed: 2016-12-25}
}
@misc{bluecost2,
  title        = {Beacons Are Harder to Deploy Than You Think},
  howpublished = {InfoWorld, \url{http://www.infoworld.com/article/2983166/bluetooth/beacons-are-harder-to-deploy-than-you-think.html}},
  note         = {Accessed: 2016-12-25}
}
@misc{bluecost3,
  title        = {Beacons: Everything You Need to Know},
  howpublished = {Pointr Labs blog, \url{http://www.pointrlabs.com/blog/beacons-everything-you-need-to-know/}},
  note         = {Accessed: 2016-12-25}
}
@misc{bluecost4,
  title        = {List of the 9 Biggest Beacon Manufacturers},
  howpublished = {Nodes Agency, \url{https://www.nodesagency.com/list-9-biggest-beacon-manufacturers/}},
  note         = {Accessed: 2016-12-25}
}
@misc{coursera,
  author       = {Ng, Andrew},
  title        = {Stanford {CS229} -- Machine Learning},
  year         = {2008},
  howpublished = {Stanford University course lecture notes},
  note         = {Broad introduction to machine learning and statistical
                  pattern recognition: supervised learning, learning theory,
                  unsupervised learning, and reinforcement learning},
  keywords     = {machine learning, statistics, regression}
}
@book{statbook,
  author    = {James, Gareth and Witten, Daniela and Hastie, Trevor and Tibshirani, Robert},
  title     = {An Introduction to Statistical Learning: With Applications in {R}},
  year      = {2014},
  isbn      = {978-1-4614-7137-0},
  publisher = {Springer}
}
@inproceedings{uji,
  author    = {J. Torres-Sospedra and R. Montoliu and A. Martinez-Us{\'o} and J. P. Avariento and T. J. Arnau and M. Benedito-Bordonau and J. Huerta},
  booktitle = {Indoor Positioning and Indoor Navigation (IPIN), 2014 International Conference on},
  title     = {{UJIIndoorLoc}: A new multi-building and multi-floor database for {WLAN} fingerprint-based indoor localization problems},
  year      = {2014},
  pages     = {261--270},
  keywords  = {database management systems;mobile computing;wireless LAN;UJIIndoorLoc;WLAN fingerprint;WLAN fingerprinting;indoor localization problems;mobile computing;mobile sensing community;multibuilding database;multifloor database;multifloor localization database;research community;Buildings;Databases;Smart phones;Training;Wireless LAN;Wireless application protocol},
  doi       = {10.1109/IPIN.2014.7275492},
  month     = oct,
}
@article{Nagi2013,
  author   = {Nagi, Sajid and Bhattacharyya, Dhruba Kr.},
  title    = {Classification of microarray cancer data using ensemble approach},
  journal  = {Network Modeling Analysis in Health Informatics and Bioinformatics},
  year     = {2013},
  volume   = {2},
  number   = {3},
  pages    = {159--173},
  abstract = {An ensemble of classifiers is created by combining predictions of multiple component classifiers for improving prediction performance. In this paper, we conduct experimental comparison of J48, NB, IBK on nine microarray cancer datasets and also analyze their performance with Bagging, Boosting and Stack Generalization. The experimental results show that all ensemble methods outperform the individual classification methods. We then present a method, referred to as SD-EnClass, for combining classifiers from different classification families into an ensemble, based on a simple estimation of each classifier's class performance. The experimental results show that the proposed model improves classification accuracy, in comparison to simply selecting the best classifier in the combination. In the second stage, we combine the results of our proposed method with the results of Boosting, Bagging and Stacking using the combining method proposed, to obtain results which are significantly better than using Boosting, Bagging or Stacking alone.},
  issn     = {2192-6670},
  doi      = {10.1007/s13721-013-0034-x}
}
@inproceedings{comparative,
  author    = {S. Bozkurt and G. Elibol and S. Gunal and U. Yayan},
  booktitle = {Innovations in Intelligent SysTems and Applications (INISTA), 2015 International Symposium on},
  title     = {A comparative study on machine learning algorithms for indoor positioning},
  year      = {2015},
  pages     = {1--8},
  keywords  = {RSSI;decision trees;indoor navigation;learning (artificial intelligence);pattern classification;AdaBoost ensemble algorithms;RSS values;UJIIndoorLoc indoor positioning database;bagging ensemble algorithms;decision tree classifier;fingerprinting based positioning;indoor positioning systems;k-NN algorithm;k-nearest neighbor algorithm;machine learning algorithms;position estimation;radio map;received signal strength values;Accuracy;Classification algorithms;Decision trees;Floors;Machine learning algorithms;Training;AdaBoost;Bagging;Bayes Net;Localization;Na{\"\i}ve Bayes;RF Map;Received Signal Strength (RSS);SMO;WEKA;classification;decision tree (J48);indoor positioning;machine learning algorithms;nearest neighbor (NN)},
  doi       = {10.1109/INISTA.2015.7276725},
  month     = sep,
}
@book{quinlan,
  author    = {Quinlan, Ross},
  title     = {C4.5: Programs for Machine Learning},
  publisher = {Morgan Kaufmann Publishers},
  address   = {San Mateo, CA},
  year      = {1993}
}
@inproceedings{Freund,
  author    = {Freund, Yoav and Schapire, Robert E.},
  title     = {Experiments with a new boosting algorithm},
  booktitle = {Thirteenth International Conference on Machine Learning},
  pages     = {148--156},
  publisher = {Morgan Kaufmann},
  address   = {San Francisco},
  year      = {1996}
}
@article{comparativeEN,
  author        = {Maclin, Richard and Opitz, David W.},
  title         = {Popular Ensemble Methods: An Empirical Study},
  journal       = {CoRR},
  volume        = {abs/1106.0257},
  year          = {2011},
  eprint        = {1106.0257},
  archiveprefix = {arXiv},
  url           = {http://arxiv.org/abs/1106.0257},
  timestamp     = {Mon, 05 Dec 2011 18:05:33 +0100},
  biburl        = {http://dblp.uni-trier.de/rec/bib/journals/corr/abs-1106-0257},
  bibsource     = {dblp computer science bibliography, http://dblp.org}
}
@misc{explainingadaboost,
  author = {Schapire, Robert E.},
  title  = {Explaining {AdaBoost}},
  year   = {2013}
}
@misc{adaboost,
  author = {Freund, Yoav and Schapire, Robert E.},
  title  = {A Decision-Theoretic Generalization of On-Line Learning and an Application to Boosting},
  year   = {1996}
}
@misc{dapokemon,
  title  = {Pok{\'e}mon Go: imers{\~a}o, publicidade e ludicidade em um novo modelo de compra e inser{\c{c}}{\~a}o de m{\'\i}dia},
  author = {Pimenta, Rodrigo Duguay da Hora},
  year   = {2016}
}
@inproceedings{7471364,
  author    = {W. Bulten and A. C. V. Rossum and W. F. G. Haselager},
  booktitle = {2016 IEEE First International Conference on Internet-of-Things Design and Implementation (IoTDI)},
  title     = {Human {SLAM}, Indoor Localisation of Devices and Users},
  year      = {2016},
  pages     = {211--222},
  keywords  = {RSSI;data privacy;indoor environment;ubiquitous computing;FastSLAM;RSSI update;SLAC algorithm;device RSSI;device indoor localisation;device location;device position;environment noise;human SLAM;nontrivial environment;received signal strength indicator;simultaneous localisation and configuration;smart space;user indoor localisation;user motion data;user privacy;Estimation;Performance evaluation;Privacy;Simultaneous localization and mapping;Privacy;Simultaneous localization and mapping;Smart Homes;Ubiquitous computing;Wireless sensor networks},
  doi       = {10.1109/IoTDI.2015.19},
  month     = apr,
}
@inproceedings{provos1999bcrypt,
  author       = {Provos, Niels and Mazi{\`e}res, David},
  title        = {A Future-Adaptable Password Scheme},
  booktitle    = {Proceedings of the {USENIX} Annual Technical Conference, {FREENIX} Track},
  year         = {1999},
  organization = {USENIX}
}
@article{evaluation,
  author  = {Curran, Kevin and Furey, Eoghan and Lunney, Tom and Santos, Jose and Woods, Derek and McCaughey, Aiden},
  title   = {An evaluation of indoor location determination technologies},
  journal = {Journal of Location Based Services},
  volume  = {5},
  number  = {2},
  year    = {2011}
}
@article{situationIndoor,
  author  = {Deng, Zhongliang and Yu, Yanpei and Xie, Yuan and Wan, Neng and Yang, Lei},
  title   = {Situation and Development Tendency of Indoor Positioning},
  journal = {China Communications},
  year    = {2014}
}