This repository was archived by the owner on Sep 27, 2022. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path setup.py
More file actions
59 lines (52 loc) · 1.8 KB
/
setup.py
File metadata and controls
59 lines (52 loc) · 1.8 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
import codecs
import os
from setuptools import find_packages, setup
# Load the repository README as the package's long description.
here = os.path.abspath(os.path.dirname(__file__))
# Use the built-in open() rather than the legacy codecs.open():
# the codecs variant is discouraged by the Python docs and opens the
# underlying file in binary mode, so it skips universal-newline
# translation (a CRLF README would leak "\r" into the description).
with open(os.path.join(here, "README.md"), encoding="utf-8") as fh:
    long_description = "\n" + fh.read()
# Package metadata constants.
VERSION = "0.0.5"
DESCRIPTION = "Wikipedia HTML Dump Parsing"
LONG_DESCRIPTION = (
    "A package that supports plaintext and object extraction from Wikipedia HTML dumps."
)

# Optional dependency groups ("extras"); the "dev" group bundles all of
# them so contributors can `pip install .[dev]` in one step.
EXTRAS_REQUIRE = {
    "tests": ["pytest>=6.2.5"],
    "pre-commit": ["pre-commit"],
    "typing": ["mypy>=0.961"],
}
EXTRAS_REQUIRE["dev"] = [
    requirement
    for group in ("tests", "pre-commit", "typing")
    for requirement in EXTRAS_REQUIRE[group]
]
# Register the distribution with setuptools. Sources live under src/
# (src-layout), so package_dir/find_packages both point there.
setup(
    name="mwparserfromhtml",
    version=VERSION,  # single-sourced from the VERSION constant above
    author="Appledora & Isaac Johnson & Martin Gerlach",
    author_email="<isaac@wikimedia.org>",
    url="https://gitlab.wikimedia.org/repos/research/html-dumps",
    license="MIT License",
    description=DESCRIPTION,
    # long_description is the README text read above, rendered as Markdown.
    long_description_content_type="text/markdown",
    long_description=long_description,
    package_dir={"": "src"},
    packages=find_packages(where="src"),
    # Runtime dependencies: HTML parsing + HTTP fetching.
    install_requires=["beautifulsoup4", "requests"],
    keywords=["python", "wikipedia", "html"],
    classifiers=[
        "Development Status :: 4 - Beta",
        "License :: OSI Approved :: MIT License",
        "Natural Language :: English",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3.10",
        "Operating System :: Unix",
        "Operating System :: MacOS :: MacOS X",
        "Operating System :: Microsoft :: Windows",
    ],
    # Optional dependency groups defined above (tests / pre-commit / typing / dev).
    extras_require=EXTRAS_REQUIRE,
    # Ship non-Python files listed in MANIFEST.in / package data.
    include_package_data=True,
    zip_safe=False,
)