graykode / commit-autosuggestions
Authored by graykode, 2020-11-02 20:06:42 +0900
Commit 4c9d986804b1616cce62774fa8eae296b2add337 (4c9d9868)
1 parent: 26618a00

(refactor) folder naming and path
Showing 7 changed files with 6 additions and 6 deletions
gitcloner.py → src/preprocess/gitcloner.py
gitparser.py → src/preprocess/gitparser.py
repositories.txt → src/preprocess/repositories.txt
code2nl/bleu.py → src/train/bleu.py
code2nl/customized_roberta.py → src/train/customized_roberta.py
code2nl/model.py → src/train/model.py
code2nl/run.py → src/train/run.py
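Taken together, the renames group the data-collection scripts under src/preprocess and the former code2nl training code under src/train. A rough sketch of the resulting layout, showing only the files touched by this commit:

    src/
    ├── preprocess/
    │   ├── gitcloner.py
    │   ├── gitparser.py
    │   └── repositories.txt
    └── train/
        ├── bleu.py
        ├── customized_roberta.py
        ├── model.py
        └── run.py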
gitcloner.py → src/preprocess/gitcloner.py (file moved)
gitparser.py → src/preprocess/gitparser.py (file moved)
repositories.txt → src/preprocess/repositories.txt (file moved)
code2nl/bleu.py → src/train/bleu.py (file moved)
code2nl/customized_roberta.py → src/train/customized_roberta.py (file moved)
code2nl/model.py → src/train/model.py
@@ -3,9 +3,7 @@
 import torch
 import torch.nn as nn
-import torch
-from torch.autograd import Variable
 import copy
 class Seq2Seq(nn.Module):
     """
         Build Seqence-to-Sequence.
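Besides the move, this hunk appears to drop the duplicate import torch and the torch.autograd.Variable import. Variable has been merged into Tensor since PyTorch 0.4, so nothing else needs to change; a minimal illustration (not part of the commit):

    import torch

    # Since the Variable/Tensor merge in PyTorch 0.4, plain tensors carry
    # autograd state themselves, so torch.autograd.Variable wrappers are unnecessary.
    x = torch.zeros(3, requires_grad=True)
    loss = (x * 2.0).sum()
    loss.backward()
    print(x.grad)   # tensor([2., 2., 2.])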
@@ -162,7 +160,7 @@ class Beam(object):
         # bestScoresId is flattened beam x word array, so calculate which
         # word and beam each score came from
-        prevK = bestScoresId / numWords
+        prevK = bestScoresId // numWords
         self.prevKs.append(prevK)
         self.nextYs.append((bestScoresId - prevK * numWords))
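The one functional change here is / becoming //. bestScoresId holds flat indices into a beam_size × vocab_size score matrix, and floor division is needed to recover the beam index; on recent PyTorch versions, / on an integer tensor performs true division and returns floats, which later break indexing. A small sketch of the decomposition (variable names are illustrative, not taken from the repository):

    import torch

    beam_size, vocab_size = 3, 5
    scores = torch.rand(beam_size, vocab_size)

    # topk over the flattened scores yields flat indices in [0, beam_size * vocab_size)
    best_scores, best_ids = scores.view(-1).topk(beam_size)

    prev_k = best_ids // vocab_size           # beam each candidate came from (stays integer)
    next_y = best_ids - prev_k * vocab_size   # word index within that beam

    # best_ids / vocab_size would produce a float tensor and fail when used as an index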
code2nl/run.py → src/train/run.py
@@ -22,7 +22,6 @@ using a masked language modeling (MLM) loss.
 from __future__ import absolute_import
 import os
 import sys
-import bleu
 import pickle
 import torch
 import json
@@ -35,11 +34,14 @@ from itertools import cycle
 import torch.nn as nn
 from model import Seq2Seq
 from tqdm import tqdm, trange
-from customized_roberta import RobertaModel
 from torch.utils.data import DataLoader, Dataset, SequentialSampler, RandomSampler, TensorDataset
 from torch.utils.data.distributed import DistributedSampler
 from transformers import (WEIGHTS_NAME, AdamW, get_linear_schedule_with_warmup,
                           RobertaConfig, RobertaTokenizer)
+import train.bleu as bleu
+from train.customized_roberta import RobertaModel
 MODEL_CLASSES = {'roberta': (RobertaConfig, RobertaModel, RobertaTokenizer)}
 logging.basicConfig(format='%(asctime)s - %(levelname)s - %(name)s - %(message)s',