
Commit

base commit
vasugr committed Nov 30, 2019
1 parent 57ca8a5 commit 90034a4
Showing 185 changed files with 4,194,233 additions and 0 deletions.
2,140 changes: 2,140 additions & 0 deletions Untitled.ipynb

Large diffs are not rendered by default.

475 changes: 475 additions & 0 deletions Untitled1.ipynb

Large diffs are not rendered by default.

Binary file added __pycache__/generator_utils.cpython-36.pyc
Binary file not shown.
Binary file added attn_decoder1.pt
Binary file not shown.
25 changes: 25 additions & 0 deletions back.py
@@ -0,0 +1,25 @@
from flask import Flask, request, render_template
import os
from werkzeug.datastructures import ImmutableMultiDict  # imported here but not used directly

app = Flask(__name__)

@app.route('/')
def index():
    return render_template("index.html")  # serve the front-end page

@app.route('/result', methods=['POST'])  # called when a query is submitted from the front end
def result():
    data = request.form  # posted form fields (an ImmutableMultiDict)
    imd = data.to_dict(flat=False)  # convert the form data to a plain dict of lists
    ans = imd['Query'][0]  # the natural-language question typed by the user
    print("answer=", ans)
    os.system("python test.py " + str(ans))  # test.py writes the SPARQL translation to output.txt
    f = open("output.txt", "r")  # read the generated SPARQL query and return it to the front end
    line = f.read()
    f.close()
    return line

if __name__ == '__main__':
    app.run(debug=True)
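A side note on the os.system call above: it builds a shell command by concatenating the user's query, so whitespace or shell metacharacters in the question are interpreted by the shell. A minimal alternative sketch using subprocess (an assumption for illustration, not part of this commit; it presumes test.py keeps writing its SPARQL translation to output.txt as above):

# Hypothetical variant of the call in back.py, not part of this commit.
import subprocess

def translate(query):
    # Pass the query as a single argv element so the shell never sees it.
    subprocess.run(["python", "test.py", query], check=True)
    with open("output.txt", "r") as f:
        return f.read()

Once the server is running, the /result endpoint can be exercised with a form-encoded POST, e.g. curl --data-urlencode "Query=example question" http://127.0.0.1:5000/result (the question text is just a placeholder).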
56 changes: 56 additions & 0 deletions data/1.cpp
@@ -0,0 +1,56 @@
#include <bits/stdc++.h>

using namespace std;

typedef pair<int,int> II;
typedef vector< II > VII;
typedef vector<int> VI;
typedef vector<long long> VLL;
typedef vector< VI > VVI;
typedef long long int LL;

#define PB push_back
#define MP make_pair
#define F first
#define S second
#define SZ(a) (int)(a.size())
#define ALL(a) a.begin(),a.end()
#define SET(a,b) memset(a,b,sizeof(a))


#define din(n) int n; scanf("%d",&n)
#define dout(n) printf("%d\n",n)
#define llin(n) long long n; scanf("%lld",&n)
#define llout(n) printf("%lld\n",n)
#define strin(s,l) char s[l]; scanf("%s",s)
#define strout(n) printf("%s\n",n)
#define fast_io ios_base::sync_with_stdio(false);cin.tie(NULL)
#define endl '\n'

#define TRACE

#ifdef TRACE
#define trace(...) __f(#__VA_ARGS__, __VA_ARGS__)
template <typename Arg1>
void __f(const char* name, Arg1&& arg1){
    cerr << name << " : " << arg1 << '\n';
}
template <typename Arg1, typename... Args>
void __f(const char* names, Arg1&& arg1, Args&&... args){
    const char* comma = strchr(names + 1, ',');
    cerr.write(names, comma - names) << " : " << arg1 << " | ";
    __f(comma + 1, args...);
}
#else
#define trace(...)
#endif
bool cmpManh(const std::pair<long long,long long>& l, const std::pair<long long,long long>& r) {
return ((llabs(l.F) + llabs(l.S)) < (llabs(r.F) + llabs(r.S)));
}

int gcd(int a, int b){
    if (a == 0) return b;
    return gcd(b % a, a);
}

int main(void)
{
    return 0;
}
6 changes: 6 additions & 0 deletions data/Compositions2/_model/best_bleu/checkpoint
@@ -0,0 +1,6 @@
model_checkpoint_path: "translate.ckpt-7000"
all_model_checkpoint_paths: "translate.ckpt-3000"
all_model_checkpoint_paths: "translate.ckpt-4000"
all_model_checkpoint_paths: "translate.ckpt-5000"
all_model_checkpoint_paths: "translate.ckpt-6000"
all_model_checkpoint_paths: "translate.ckpt-7000"
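The checkpoint file above is standard TensorFlow bookkeeping: model_checkpoint_path names the most recent save and all_model_checkpoint_paths lists the retained ones. As an illustration only (not part of this commit), the latest prefix can be resolved from such a directory with:

# Illustrative sketch; assumes the TensorFlow 1.x API used for nmt-style training.
import tensorflow as tf

ckpt = tf.train.latest_checkpoint("data/Compositions2/_model/best_bleu")
print(ckpt)  # data/Compositions2/_model/best_bleu/translate.ckpt-7000, from model_checkpoint_path above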
1 change: 1 addition & 0 deletions data/Compositions2/_model/best_bleu/hparams
@@ -0,0 +1 @@
{"src": "en", "tgt": "sparql", "train_prefix": "../data/Compositions2//train", "dev_prefix": "../data/Compositions2//dev", "test_prefix": "../data/Compositions2//test", "vocab_prefix": "../data/Compositions2//vocab", "out_dir": "../data/Compositions2/_model", "num_units": 128, "num_layers": 2, "dropout": 0.2, "unit_type": "lstm", "encoder_type": "uni", "residual": false, "time_major": true, "num_embeddings_partitions": 0, "attention": "", "attention_architecture": "standard", "pass_hidden_state": true, "optimizer": "sgd", "num_train_steps": 10000, "batch_size": 128, "init_op": "uniform", "init_weight": 0.1, "max_gradient_norm": 5.0, "learning_rate": 1.0, "start_decay_step": 0, "decay_factor": 0.98, "decay_steps": 10000, "colocate_gradients_with_ops": true, "num_buckets": 5, "max_train": 0, "src_max_len": 50, "tgt_max_len": 50, "source_reverse": false, "src_max_len_infer": null, "tgt_max_len_infer": null, "infer_batch_size": 32, "beam_width": 0, "length_penalty_weight": 0.0, "sos": "<s>", "eos": "</s>", "bpe_delimiter": null, "forget_bias": 1.0, "num_gpus": 1, "epoch_step": 65, "steps_per_stats": 100, "steps_per_external_eval": null, "share_vocab": false, "metrics": ["bleu"], "log_device_placement": false, "random_seed": null, "num_residual_layers": 0, "src_vocab_size": 6165, "tgt_vocab_size": 6833, "src_vocab_file": "../data/Compositions2/_model/vocab.en", "tgt_vocab_file": "../data/Compositions2/_model/vocab.sparql", "best_bleu": 58.855024973094196, "best_bleu_dir": "../data/Compositions2/_model/best_bleu"}
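The hparams file is a single JSON object recording the run configuration: a uni-directional 2-layer LSTM with 128 units, SGD with learning rate 1.0, 10,000 training steps, and BLEU as the selection metric. A small sketch of reading a few of these values back (illustrative only, not part of the commit):

import json

# Path as it appears in this commit; adjust if running from another directory.
with open("data/Compositions2/_model/best_bleu/hparams") as f:
    hparams = json.load(f)

print(hparams["unit_type"], hparams["num_layers"], hparams["num_units"])  # lstm 2 128
print("best BLEU:", hparams["best_bleu"])  # 58.855... for this best_bleu copy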
Binary files added (18 entries; file names not rendered on this page, contents not shown).
6 changes: 6 additions & 0 deletions data/Compositions2/_model/checkpoint
@@ -0,0 +1,6 @@
model_checkpoint_path: "translate.ckpt-7000"
all_model_checkpoint_paths: "translate.ckpt-3000"
all_model_checkpoint_paths: "translate.ckpt-4000"
all_model_checkpoint_paths: "translate.ckpt-5000"
all_model_checkpoint_paths: "translate.ckpt-6000"
all_model_checkpoint_paths: "translate.ckpt-7000"
1 change: 1 addition & 0 deletions data/Compositions2/_model/hparams
@@ -0,0 +1 @@
{"src": "en", "tgt": "sparql", "train_prefix": "../data/Compositions2//train", "dev_prefix": "../data/Compositions2//dev", "test_prefix": "../data/Compositions2//test", "vocab_prefix": "../data/Compositions2//vocab", "out_dir": "../data/Compositions2/_model", "num_units": 128, "num_layers": 2, "dropout": 0.2, "unit_type": "lstm", "encoder_type": "uni", "residual": false, "time_major": true, "num_embeddings_partitions": 0, "attention": "", "attention_architecture": "standard", "pass_hidden_state": true, "optimizer": "sgd", "num_train_steps": 10000, "batch_size": 128, "init_op": "uniform", "init_weight": 0.1, "max_gradient_norm": 5.0, "learning_rate": 1.0, "start_decay_step": 0, "decay_factor": 0.98, "decay_steps": 10000, "colocate_gradients_with_ops": true, "num_buckets": 5, "max_train": 0, "src_max_len": 50, "tgt_max_len": 50, "source_reverse": false, "src_max_len_infer": null, "tgt_max_len_infer": null, "infer_batch_size": 32, "beam_width": 0, "length_penalty_weight": 0.0, "sos": "<s>", "eos": "</s>", "bpe_delimiter": null, "forget_bias": 1.0, "num_gpus": 1, "epoch_step": 0, "steps_per_stats": 100, "steps_per_external_eval": null, "share_vocab": false, "metrics": ["bleu"], "log_device_placement": false, "random_seed": null, "num_residual_layers": 0, "src_vocab_size": 6165, "tgt_vocab_size": 6833, "src_vocab_file": "../data/Compositions2/_model/vocab.en", "tgt_vocab_file": "../data/Compositions2/_model/vocab.sparql", "best_bleu": 64.80136634682646, "best_bleu_dir": "../data/Compositions2/_model/best_bleu"}
14 changes: 14 additions & 0 deletions data/Compositions2/_model/log_1573854344
@@ -0,0 +1,14 @@
# log_file=../data/Compositions2/_model/log_1573854344
# Start step 0, lr 1, Sat Nov 16 03:15:46 2019
global step 100 lr 1 step-time 0.27s wps 8.71K ppl 21.31 bleu 0.00
global step 200 lr 1 step-time 0.27s wps 8.80K ppl 3.20 bleu 0.00
global step 300 lr 1 step-time 0.27s wps 8.82K ppl 2.94 bleu 0.00
global step 400 lr 1 step-time 0.29s wps 8.17K ppl 2.87 bleu 0.00
global step 500 lr 1 step-time 0.28s wps 8.48K ppl 2.84 bleu 0.00
global step 600 lr 1 step-time 0.29s wps 8.14K ppl 2.79 bleu 0.00
global step 700 lr 1 step-time 0.29s wps 8.37K ppl 2.74 bleu 0.00
global step 800 lr 1 step-time 0.29s wps 8.29K ppl 2.67 bleu 0.00
global step 900 lr 1 step-time 0.29s wps 8.17K ppl 2.56 bleu 0.00
global step 1000 lr 1 step-time 0.28s wps 8.45K ppl 2.46 bleu 0.00
# Final, step 1000 lr 1 step-time 0.28 wps 8.45K ppl 2.46, dev ppl 2.53, dev bleu 58.9, test ppl 7.40, test bleu 63.4, Sat Nov 16 03:20:36 2019
# Best bleu, step 1000 step-time 0.28 wps 8.45K, dev ppl 2.53, dev bleu 58.9, test ppl 7.40, test bleu 63.4, Sat Nov 16 03:20:39 2019
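In these logs ppl is perplexity, the exponential of the average per-token cross-entropy, so the final train ppl of 2.46 corresponds to roughly 0.90 nats per token. A one-line check (illustrative):

import math
print(math.log(2.46))  # ~0.900 nats per token, implied by the final train ppl of 2.46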
60 changes: 60 additions & 0 deletions data/Compositions2/_model/log_1573854658
@@ -0,0 +1,60 @@
# log_file=../data/Compositions2/_model/log_1573854658
# Start step 1000, lr 1, Sat Nov 16 03:21:02 2019
global step 1100 lr 1 step-time 0.29s wps 8.72K ppl 2.29 bleu 58.86
global step 1200 lr 1 step-time 0.27s wps 8.64K ppl 2.28 bleu 58.86
global step 1300 lr 1 step-time 0.27s wps 8.81K ppl 2.25 bleu 58.86
global step 1400 lr 1 step-time 0.27s wps 8.80K ppl 2.14 bleu 58.86
global step 1500 lr 1 step-time 0.27s wps 8.76K ppl 2.11 bleu 58.86
global step 1600 lr 1 step-time 0.27s wps 8.78K ppl 2.11 bleu 58.86
global step 1700 lr 1 step-time 0.27s wps 8.71K ppl 2.05 bleu 58.86
global step 1800 lr 1 step-time 0.27s wps 8.78K ppl 2.01 bleu 58.86
global step 1900 lr 1 step-time 0.27s wps 8.74K ppl 1.99 bleu 58.86
global step 2000 lr 1 step-time 0.28s wps 8.68K ppl 1.98 bleu 58.86
global step 2100 lr 1 step-time 0.27s wps 8.79K ppl 1.94 bleu 61.84
global step 2200 lr 1 step-time 0.28s wps 8.56K ppl 1.93 bleu 61.84
global step 2300 lr 1 step-time 0.28s wps 8.50K ppl 1.90 bleu 61.84
global step 2400 lr 1 step-time 0.28s wps 8.61K ppl 1.88 bleu 61.84
global step 2500 lr 1 step-time 0.31s wps 7.61K ppl 1.86 bleu 61.84
global step 2600 lr 1 step-time 0.29s wps 8.24K ppl 1.85 bleu 61.84
global step 2700 lr 1 step-time 0.30s wps 7.99K ppl 1.83 bleu 61.84
global step 2800 lr 1 step-time 0.29s wps 8.31K ppl 1.82 bleu 61.84
global step 2900 lr 1 step-time 0.30s wps 7.92K ppl 1.79 bleu 61.84
global step 3000 lr 1 step-time 0.29s wps 8.19K ppl 1.78 bleu 61.84
global step 3100 lr 1 step-time 0.30s wps 8.11K ppl 1.77 bleu 63.27
global step 3200 lr 1 step-time 0.28s wps 8.64K ppl 1.76 bleu 63.27
global step 3300 lr 1 step-time 0.29s wps 8.34K ppl 1.75 bleu 63.27
global step 3400 lr 1 step-time 0.29s wps 8.20K ppl 1.73 bleu 63.27
global step 3500 lr 1 step-time 0.29s wps 8.20K ppl 1.72 bleu 63.27
global step 3600 lr 1 step-time 0.30s wps 7.96K ppl 1.71 bleu 63.27
global step 3700 lr 1 step-time 0.29s wps 8.39K ppl 1.70 bleu 63.27
global step 3800 lr 1 step-time 0.28s wps 8.57K ppl 1.69 bleu 63.27
global step 3900 lr 1 step-time 0.28s wps 8.56K ppl 1.68 bleu 63.27
global step 4000 lr 1 step-time 0.29s wps 8.16K ppl 1.82 bleu 63.27
global step 4100 lr 1 step-time 0.31s wps 7.79K ppl 1.67 bleu 63.45
global step 4200 lr 1 step-time 0.29s wps 8.33K ppl 1.66 bleu 63.45
global step 4300 lr 1 step-time 0.27s wps 8.74K ppl 1.65 bleu 63.45
global step 4400 lr 1 step-time 0.28s wps 8.42K ppl 1.64 bleu 63.45
global step 4500 lr 1 step-time 0.29s wps 8.15K ppl 1.65 bleu 63.45
global step 4600 lr 1 step-time 0.29s wps 8.16K ppl 1.63 bleu 63.45
global step 4700 lr 1 step-time 0.28s wps 8.67K ppl 1.62 bleu 63.45
global step 4800 lr 1 step-time 0.28s wps 8.58K ppl 1.62 bleu 63.45
global step 4900 lr 1 step-time 0.28s wps 8.65K ppl 1.62 bleu 63.45
global step 5000 lr 1 step-time 0.28s wps 8.61K ppl 1.61 bleu 63.45
global step 5100 lr 1 step-time 0.28s wps 8.44K ppl 1.60 bleu 63.82
global step 5200 lr 1 step-time 0.28s wps 8.40K ppl 1.59 bleu 63.82
global step 5300 lr 1 step-time 0.28s wps 8.61K ppl 1.59 bleu 63.82
global step 5400 lr 1 step-time 0.28s wps 8.48K ppl 1.58 bleu 63.82
global step 5500 lr 1 step-time 0.29s wps 8.39K ppl 1.58 bleu 63.82
global step 5600 lr 1 step-time 0.28s wps 8.55K ppl 1.58 bleu 63.82
global step 5700 lr 1 step-time 0.28s wps 8.36K ppl 1.57 bleu 63.82
global step 5800 lr 1 step-time 0.30s wps 8.03K ppl 1.56 bleu 63.82
global step 5900 lr 1 step-time 0.29s wps 8.27K ppl 1.56 bleu 63.82
global step 6000 lr 1 step-time 0.29s wps 8.31K ppl 1.56 bleu 63.82
global step 6100 lr 1 step-time 0.28s wps 8.62K ppl 1.55 bleu 64.33
global step 6200 lr 1 step-time 0.28s wps 8.67K ppl 1.55 bleu 64.33
global step 6300 lr 1 step-time 0.28s wps 8.47K ppl 1.54 bleu 64.33
global step 6400 lr 1 step-time 0.28s wps 8.44K ppl 1.53 bleu 64.33
global step 6500 lr 1 step-time 0.28s wps 8.51K ppl 1.53 bleu 64.33
global step 6600 lr 1 step-time 0.28s wps 8.51K ppl 1.52 bleu 64.33
global step 6700 lr 1 step-time 0.29s wps 8.10K ppl 1.51 bleu 64.33

