-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathcommon.smk
137 lines (120 loc) · 5.27 KB
/
common.smk
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
import pandas as pd
def get_threads_for_classifier(wildcards) -> int:
    """Return the thread count for the classifier named in `wildcards`.

    The k-FP classifier is capped at 4 threads; every other classifier
    is allotted 128.
    """
    if wildcards["classifier"] == "kfp":
        return 4
    return 128
def to_memory_per_core(mem_mb: int):
    """Return a callable dividing `mem_mb` evenly across the granted threads.

    The returned function has the (wildcards, input, threads) signature that
    Snakemake expects for a callable `resources` entry.

    Fix: use integer floor division instead of `int(mem_mb / threads)` —
    the latter goes through a float, which loses precision for very large
    memory values; for positive inputs both forms otherwise agree.
    """
    def _memory_per_core(wildcards, input, threads) -> int:
        # wildcards/input are unused but required by Snakemake's calling convention.
        return mem_mb // threads
    return _memory_per_core
def build_neqo_args(exp_config):
    """Return a function mapping wildcards to a neqo command-line argument list.

    The returned builder reads `wildcards["defence"]` ("front", "tamaraw" or
    "undefended") and pulls that defence's settings from `exp_config`.

    Raises:
        ValueError: if the defence wildcard is not one of the supported values.
    """
    def _optional_shared_args(args) -> list:
        """Optional flags common to the front and tamaraw defences, emitted
        only for keys present in the defence's config section."""
        extra = []
        if "msd_limit_excess" in args:
            extra += ["--msd-limit-excess", args["msd_limit_excess"]]
        if "use_empty_resources" in args:
            # Boolean config value is rendered as the lowercase strings
            # "true"/"false" expected on the command line.
            extra += ["--use-empty-resources", str(args["use_empty_resources"]).lower()]
        if "max_udp_payload_size" in args:
            extra += ["--max-udp-payload-size", args["max_udp_payload_size"]]
        return extra

    def _builder(wildcards):
        defence = wildcards["defence"]
        if defence == "front":
            args = exp_config["front"]
            return [
                "--defence", "front",
                "--defence-packet-size", args["packet_size"],
                "--front-max-client-pkts", args["max_client_packets"],
                "--front-max-server-pkts", args["max_server_packets"],
                "--front-peak-max", args["peak_maximum"],
                "--front-peak-min", args["peak_minimum"],
            ] + _optional_shared_args(args)
        if defence == "tamaraw":
            args = exp_config["tamaraw"]
            result = [
                "--defence", "tamaraw",
                "--defence-packet-size", args["packet_size"],
                "--tamaraw-rate-in", args["rate_in"],
                "--tamaraw-rate-out", args["rate_out"],
                "--tamaraw-modulo", args["packet_multiple"],
            ] + _optional_shared_args(args)
            if "add_noise" in args:
                result += [
                    "--add-noise",
                    "--noise-chance", args["noise_chance"],
                    f"--noise-bound-lower={args['noise_bound_lower']}",
                    f"--noise-bound-upper={args['noise_bound_upper']}",
                ]
            return result
        if defence == "undefended":
            return ["--defence", "none"]
        # Bug fix: the original passed the value as a second ValueError argument
        # (logging style), so "%r" was never interpolated into the message.
        raise ValueError(f"Unsupported defence: {defence!r}")

    return _builder
def combine_varcnn_predictions(input, output):
    """Average the Var-CNN sizes and times prediction files element-wise
    and write the result to the first output path."""
    frames = {
        component: pd.read_csv(input[component]).set_index("y_true", append=True)
        for component in ("sizes", "times")
    }
    # Summing aligns the two frames on (row, y_true); halving averages them.
    averaged = frames["sizes"].add(frames["times"]).div(2)
    averaged = averaged.reset_index(level="y_true", drop=False)
    averaged.to_csv(output[0], header=True, index=False)
# Snakemake pattern rule: tune and evaluate the k-FP classifier for any {path}.
rule predict__kfp:
    """Perform hyperparameter validation and predictions for the k-FP classifier
    (pattern rule)."""
    output:
        # Predictions (positional) plus a named feature-importances file.
        "{path}/classifier~kfp/predictions.csv",
        feature_importance="{path}/classifier~kfp/feature-importances.csv"
    input:
        # Features are pre-extracted by the extract_features__kfp rule below.
        "{path}/classifier~kfp/features.csv"
    log:
        "{path}/classifier~kfp/predictions.log",
        # NOTE(review): cv-results.csv is declared under log:, presumably so
        # it is kept even when the run fails — confirm this is intentional.
        cv_results="{path}/classifier~kfp/cv-results.csv",
    threads:
        # Claim every core configured for the workflow run.
        workflow.cores
    shell:
        "workflow/scripts/evaluate_tuned_kfp.py --verbose 0 --n-jobs {threads}"
        " --cv-results-path {log[cv_results]} --feature-importance {output[feature_importance]}"
        " {input} > {output[0]} 2> {log[0]}"
# Snakemake pattern rule: evaluate one Var-CNN component ({feature_type} is
# e.g. the sizes or times stream) for a given {hyperparams} setting.
rule predict__varcnn:
    """Perform hyperparameter validation and predictions for either the sizes or time
    component of the Var-CNN classifier (pattern rule)."""
    output:
        "{path}/classifier~varcnn-{feature_type}/hyperparams~{hyperparams}/predictions.csv"
    input:
        # Evaluation script reads traces directly from the HDF5 dataset.
        "{path}/dataset.h5"
    log:
        "{path}/classifier~varcnn-{feature_type}/hyperparams~{hyperparams}/predictions.log"
    threads:
        # Claim every core configured for the workflow run.
        workflow.cores
    shell:
        "workflow/scripts/evaluate_tuned_varcnn.py --hyperparams {wildcards.hyperparams}"
        " {wildcards.feature_type} {input} > {output} 2> {log}"
# Snakemake pattern rule: evaluate the Deep Fingerprinting (DF) classifier
# for a given {hyperparams} setting.
rule predict__dfnet:
    """Perform hyperparameter validation and predictions for the Deep Fingerprinting
    classifier (pattern rule)."""
    output:
        "{path}/classifier~dfnet/hyperparams~{hyperparams}/predictions.csv"
    input:
        # Evaluation script reads traces directly from the HDF5 dataset.
        "{path}/dataset.h5"
    log:
        "{path}/classifier~dfnet/hyperparams~{hyperparams}/predictions.log"
    threads:
        # Claim every core configured for the workflow run.
        workflow.cores
    shell:
        "workflow/scripts/evaluate_tuned_df.py --hyperparams {wildcards.hyperparams}"
        " {input} > {output} 2> {log}"
# Snakemake pattern rule: materialise k-FP features once so predict__kfp
# does not repeat the expensive extraction.
rule extract_features__kfp:
    """Pre-extract the k-FP features as this can be time-consuming (pattern rule)."""
    output:
        "{path}/classifier~kfp/features.csv"
    input:
        "{path}/dataset.h5"
    log:
        "{path}/classifier~kfp/features.log"
    # Fixed at 12 threads rather than workflow.cores, unlike the predict rules.
    threads: 12
    shell:
        "workflow/scripts/extract_kfp_features.py {input} > {output} 2> {log}"