add documentation; add the original code source info

This commit is contained in:
Daisy Deng 2020-03-10 16:11:57 +00:00
Родитель 5b56f6e7d0
Коммит b9c8a987d3
13 изменённых файлов: 72 добавлений и 10 удалений

Просмотреть файл

@ -50,7 +50,7 @@ The following is a summary of the commonly used NLP scenarios covered in the rep
|-------------------------| ------------------- |-------|---|
|Text Classification |BERT, XLNet, RoBERTa| Text classification is a supervised learning method of learning and predicting the category or the class of a document given its text content. |English, Hindi, Arabic|
|Named Entity Recognition |BERT| Named entity recognition (NER) is the task of classifying words or key phrases of a text into predefined entities of interest. |English|
|Text Summarization|BERTSum <br> UniLM (s2s-ft)|Text summarization is a language generation task of summarizing the input text into a shorter paragraph of text.|English
|Text Summarization|BERTSumExt <br> BERTSumAbs <br> UniLM (s2s-ft)|Text summarization is a language generation task of summarizing the input text into a shorter paragraph of text.|English
|Entailment |BERT, XLNet, RoBERTa| Textual entailment is the task of classifying the binary relation between two natural-language texts, *text* and *hypothesis*, to determine if the *text* agrees with the *hypothesis* or not. |English|
|Question Answering |BiDAF, BERT, XLNet| Question answering (QA) is the task of retrieving or generating a valid answer for a given query in natural language, provided with a passage related to the query. |English|
|Sentence Similarity |BERT, GenSen| Sentence similarity is the process of computing a similarity score given a pair of text documents. |English|

Просмотреть файл

@ -18,7 +18,18 @@
}
},
"license": "Apache-2.0"
},
{
"component": {
"type": "git",
"git": {
"repositoryUrl": "https://github.com/nlpyang/PreSumm",
"commitHash": "2df3312582a3a014aacbc1be810841705c67d06e"
}
},
"license": "MIT"
}
],
"Version": 1
}
}

Просмотреть файл

@ -356,7 +356,7 @@
"metadata": {},
"outputs": [],
"source": [
"generated_summaries[1]"
"generated_summaries[0]"
]
},
{
@ -365,7 +365,7 @@
"metadata": {},
"outputs": [],
"source": [
"reference_summaries[1]"
"reference_summaries[0]"
]
},
{

Просмотреть файл

@ -1,3 +1,9 @@
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# This script reuses code from https://github.com/nlpyang/PreSumm
""" Implementation of ADAM optimizer. """
import math
import torch
from torch.optim.optimizer import Optimizer

Просмотреть файл

@ -1,3 +1,9 @@
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# This script reuses code from https://github.com/nlpyang/PreSumm
""" Beam classes used in the beam search. """
from __future__ import division
import torch
from .penalties import PenaltyBuilder

Просмотреть файл

@ -1,5 +1,9 @@
"""
Implementation of "Attention is All You Need"
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# This script reuses code from https://github.com/nlpyang/PreSumm
"""
Decoder implementation of "Attention is All You Need"
"""
import torch

Просмотреть файл

@ -1,3 +1,8 @@
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# This script reuses code from https://github.com/nlpyang/PreSumm
"Encoder classes used in the BertSum models."
import math
import torch

Просмотреть файл

@ -1,3 +1,9 @@
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# This script reuses code from https://github.com/nlpyang/PreSumm
"""
This file handles the details of the loss function during training.

Просмотреть файл

@ -1,7 +1,12 @@
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# This script reuses code from https://github.com/nlpyang/PreSumm
"""
The BertSum models for both extractive and abstractive summarization.
"""
import sys
# sys.path.insert(0, "/dadendev/PreSumm2/PreSumm/src")
import copy
import torch

Просмотреть файл

@ -1,3 +1,8 @@
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# This script reuses code from https://github.com/nlpyang/PreSumm
import math
import torch

Просмотреть файл

@ -1,3 +1,8 @@
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# This script reuses code from https://github.com/nlpyang/PreSumm
""" Optimizers class """
import torch
import torch.optim as optim

Просмотреть файл

@ -1,3 +1,9 @@
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# This script reuses code from https://github.com/nlpyang/PreSumm
""" PenaltyBuilder Class used in prediction/translation """
from __future__ import division
import torch

Просмотреть файл

@ -1,4 +1,7 @@
#!/usr/bin/env python
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# This script reuses code from https://github.com/nlpyang/PreSumm
""" Translator Class and builder """
from __future__ import print_function
import codecs