# Copyright 2020 The PEGASUS Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Install Pegasus."""
import setuptools
# Get the long description from the README file.
with open('README.md') as fp:
_LONG_DESCRIPTION = fp.read()
setuptools.setup(
    name='pegasus',
    version='0.0.1',
    description='Pretraining with Extracted Gap Sentences for Abstractive Summarization with Sequence-to-sequence model',
    long_description=_LONG_DESCRIPTION,
    long_description_content_type='text/markdown',
    author='Google Inc.',
    author_email='[email protected]',
    url='http://github.com/google-research/pegasus',
    license='Apache 2.0',
    packages=setuptools.find_packages(),
    package_data={},
    scripts=[],
    install_requires=[
        'absl-py',
        'mock',
        'numpy',
        'rouge-score',
        'sacrebleu',
        'sentencepiece',
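        # The TensorFlow-ecosystem packages below are pinned to the TF 1.15
        # release line so they stay mutually compatible; mixing in newer TF
        # releases is likely to conflict with these pins.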
        'tensorflow-text==1.15.0rc0',
        'tfds-nightly',
        'tensor2tensor==1.15.0',
        'tensorflow-gpu==1.15.2',
    ],
    extras_require={
        'tensorflow': ['tensorflow-gpu==1.15.2'],
    },
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'Intended Audience :: Science/Research',
        'License :: OSI Approved :: Apache Software License',
        'Topic :: Scientific/Engineering :: Artificial Intelligence',
    ],
    keywords='deeplearning machinelearning nlp summarization transformer pretraining',
)
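
# A minimal usage sketch (an assumption, not part of the original file):
# from the repository root, an editable install resolves the pinned
# dependencies above, e.g.
#
#   pip3 install -e .
#
# Since tensorflow-gpu 1.15.x only publishes wheels for Python 3.7 and
# earlier, a Python 3.6/3.7 environment is assumed here.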