Skip to content

Commit

Permalink
Leave anomaly and fine-tuning with training parameters
Browse files Browse the repository at this point in the history
  • Loading branch information
kuronosec committed Apr 29, 2024
1 parent 28db2dc commit 3b64577
Show file tree
Hide file tree
Showing 2 changed files with 7 additions and 5 deletions.
10 changes: 6 additions & 4 deletions analysis/ethereum_smart_contracts/GPT_anomaly_pretraining.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,9 @@
"device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n",
"\n",
"model = None\n",
"anomaly_validation = False"
"anomaly_validation = False\n",
"do_train=True\n",
"do_eval=True"
]
},
{
Expand Down Expand Up @@ -234,8 +236,8 @@
" report_to=\"none\",\n",
" output_dir='/data/forta/ethereum/model_anomaly',\n",
" overwrite_output_dir=False,\n",
" do_train=False, \n",
" do_eval=False,\n",
" do_train=do_train, \n",
" do_eval=do_eval,\n",
" per_device_train_batch_size=10,\n",
" per_device_eval_batch_size=10,\n",
" evaluation_strategy='steps',\n",
Expand Down Expand Up @@ -464,7 +466,7 @@
"outputs": [],
"source": [
"# if training_args.do_eval:\n",
"if training_args.do_eval:\n",
"if do_eval:\n",
" eval_output = trainer.evaluate()\n",
" print(eval_output[\"eval_loss\"])\n",
" perplexity = math.exp(eval_output[\"eval_loss\"])\n",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -62,7 +62,7 @@
"# max_length = 1024\n",
"max_length = None\n",
"\n",
"is_training = False\n",
"is_training = True\n",
"is_validation = True\n",
"\n",
"device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n",
Expand Down

0 comments on commit 3b64577

Please sign in to comment.