-
Notifications
You must be signed in to change notification settings - Fork 30
/
setup.py
128 lines (116 loc) · 3.44 KB
/
setup.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
# -*- coding: utf-8 -*-
#
# This file is part of hepcrawl.
# Copyright (C) 2015, 2016, 2017, 2018, 2019 CERN.
#
# hepcrawl is a free software; you can redistribute it and/or modify it
# under the terms of the Revised BSD License; see LICENSE file for
# more details.
"""Scrapy project for feeds into INSPIRE-HEP (http://inspirehep.net)."""
from __future__ import absolute_import, division, print_function

import io

from setuptools import setup, find_packages
readme = open('README.rst').read()
# Runtime dependencies. Several pins below work around compatibility
# constraints with the old scrapyd deployment (see inline comments).
# Quoting normalized to single quotes throughout for consistency.
install_requires = [
    'automat==20.2.0',
    'amqp~=2.0,>2.2.0,!=2.3.0',
    'autosemver~=0.2',
    'backports.tempfile==1.0',
    'boto3~=1.14',
    'dojson==1.4.0',
    'inspire-schemas~=61.5',
    'inspire-dojson~=63.0',
    'inspire-utils~=3.0,>=3.0.0',
    # newer scrapy is incompatible with old scrapyd
    'Scrapy~=1.6,<1.7.0',
    'scrapy-crawl-once~=0.1,>=0.1.1',
    'scrapy-sentry~=0.0,>=0.8.0',
    # TODO: unpin once they support wheel building again, needed for Python 3
    'scrapyd==1.1.0',
    'scrapyd-client>=1.0.1',
    'six>=1.9.0',
    'requests~=2.22,>=2.22.0',
    'celery>=4.1',
    'redis>=2.10.5',
    'pyasn1>=0.1.8',  # Needed for dependency resolving.
    'LinkHeader>=0.4.3',
    'furl>=0.4.95',
    'ftputil>=3.3.1,<4.0',
    'python-dateutil~=2.0,>=2.7.0',
    'python-scrapyd-api>=2.0.1',
    'harvestingkit>=0.6.12',
    'Sickle~=0.6,>=0.6.2',
    # newer versions seem incompatible with required scrapyd version
    'Twisted~=18.0,>=18.9.0',
    # latex parsing
    'pylatexenc~=2.9',
    'queuelib==1.5.0',
    'sentry-sdk==1.3.0',
    'structlog==20.1.0',
    'python-logstash==0.4.8',
]
# Dependencies needed only when running the test suite; exposed to
# users as the 'tests' extra via ``extras_require`` below.
tests_require = [
    'check-manifest>=0.25',
    'coverage>=4.0',
    'deepdiff==3.3.0',
    'freezegun>=0.3.9',
    'isort==4.2.2',
    'mock~=2.0,>=2.0.0',
    'pytest>=2.8.0',
    'pytest-cov>=2.1.0',
    'pytest-pep8>=1.0.6',
    'requests-mock>=1.3.0',
    'pydocstyle>=1.0.0',
    'PyYAML',  # unpinned: any version works for loading test fixtures
]
# Documentation-building dependencies (the 'docs' extra).
docs_require = [
    'docutils==0.17.1',
    'Sphinx~=1.0,>=1.5',
    'sphinxcontrib-napoleon>=0.6.1',
]

# Optional dependency groups, installable as e.g.
# ``pip install hepcrawl[docs]``.
extras_require = {
    'docs': docs_require,
    'tests': tests_require,
}
# Build-time-only dependency: autosemver must be importable while
# ``setup()`` runs (it is also passed as the ``autosemver`` argument
# to ``setup()`` below).
setup_requires = ['autosemver~=0.0,>=0.5.2,<1.0.0']
# Aggregate extra: ``pip install hepcrawl[all]`` installs every
# optional dependency group. The right-hand side is evaluated before
# the 'all' key is inserted, so the aggregate cannot include itself.
# (The original appended inside a loop over ``extras_require.items()``
# after the empty 'all' entry had already been added, which made the
# loop visit 'all' itself and extend the list with its own contents,
# duplicating every requirement.)
extras_require['all'] = [
    req for reqs in extras_require.values() for req in reqs
]

# Project homepage; also reused to build the bug-tracker URL passed to
# autosemver in ``setup()`` below.
URL = 'https://github.com/inspirehep/hepcrawl'
setup(
    name='hepcrawl',
    packages=find_packages(),
    # The module docstring at the top of this file doubles as the
    # short description.
    description=__doc__,
    long_description=readme,
    url=URL,
    author="CERN",
    author_email='[email protected]',
    # Expose this project's settings module to the ``scrapy`` CLI.
    entry_points={'scrapy': ['settings = hepcrawl.settings']},
    zip_safe=False,
    include_package_data=True,
    platforms='any',
    # Consumed by the autosemver setuptools plugin (listed in
    # ``setup_requires``); presumably it also derives the package
    # version, since no ``version=`` is passed here -- TODO confirm.
    autosemver={
        'bugtracker_url': URL + '/issues/',
    },
    setup_requires=setup_requires,
    install_requires=install_requires,
    extras_require=extras_require,
    # Ship any .cfg files found inside the hepcrawl package.
    package_data={
        'hepcrawl': ['*.cfg'],
    },
    classifiers=[
        'Intended Audience :: Developers',
        'Environment :: Console',
        'Framework :: Scrapy',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Topic :: Internet :: WWW/HTTP',
        'Topic :: Software Development :: Libraries :: Application Frameworks',
        'Topic :: Software Development :: Libraries :: Python Modules',
        'Programming Language :: Python',
        "Programming Language :: Python :: 2",
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.5',
    ],
)