presentation

This commit is contained in:
2024-04-11 13:05:55 +02:00
commit af5b886b90
25 changed files with 991 additions and 0 deletions

View File

@@ -0,0 +1,86 @@
2.6,1
2.7,2
2.9,1
3.1,3
3.2,2
3.3,2
3.4,3
3.5,1
3.6,2
3.7,2
3.8,1
3.9,2
4,4
4.1,5
4.3,8
4.4,3
4.5,3
4.6,5
4.7,5
4.8,5
4.9,5
5,4
5.1,4
5.2,6
5.3,3
5.4,3
5.5,6
5.6,7
5.7,2
5.8,7
5.9,6
6,3
6.1,5
6.2,4
6.3,2
6.4,2
6.5,5
6.6,5
6.7,3
6.8,1
6.9,2
7,1
7.1,3
7.2,1
7.3,3
7.4,5
7.5,1
7.6,7
7.7,4
7.8,7
7.9,1
8,1
8.2,2
8.3,3
8.4,2
8.5,1
8.6,1
8.7,2
8.8,1
8.9,1
9,2
9.1,2
9.2,2
9.3,2
9.4,2
9.6,1
10.2,1
10.6,1
10.7,3
10.8,2
11,2
11.1,1
11.3,2
11.8,1
12,1
12.6,1
12.7,1
13.4,1
13.5,2
15.5,1
16.7,1
18.2,1
18.3,1
28.9,1
68.9,1
79.5,1
1 2.6 1
2 2.7 2
3 2.9 1
4 3.1 3
5 3.2 2
6 3.3 2
7 3.4 3
8 3.5 1
9 3.6 2
10 3.7 2
11 3.8 1
12 3.9 2
13 4 4
14 4.1 5
15 4.3 8
16 4.4 3
17 4.5 3
18 4.6 5
19 4.7 5
20 4.8 5
21 4.9 5
22 5 4
23 5.1 4
24 5.2 6
25 5.3 3
26 5.4 3
27 5.5 6
28 5.6 7
29 5.7 2
30 5.8 7
31 5.9 6
32 6 3
33 6.1 5
34 6.2 4
35 6.3 2
36 6.4 2
37 6.5 5
38 6.6 5
39 6.7 3
40 6.8 1
41 6.9 2
42 7 1
43 7.1 3
44 7.2 1
45 7.3 3
46 7.4 5
47 7.5 1
48 7.6 7
49 7.7 4
50 7.8 7
51 7.9 1
52 8 1
53 8.2 2
54 8.3 3
55 8.4 2
56 8.5 1
57 8.6 1
58 8.7 2
59 8.8 1
60 8.9 1
61 9 2
62 9.1 2
63 9.2 2
64 9.3 2
65 9.4 2
66 9.6 1
67 10.2 1
68 10.6 1
69 10.7 3
70 10.8 2
71 11 2
72 11.1 1
73 11.3 2
74 11.8 1
75 12 1
76 12.6 1
77 12.7 1
78 13.4 1
79 13.5 2
80 15.5 1
81 16.7 1
82 18.2 1
83 18.3 1
84 28.9 1
85 68.9 1
86 79.5 1

View File

@@ -0,0 +1,86 @@
16.7,1
19.4,1
19.5,1
20.5,1
20.7,1
21,1
21.3,2
21.9,1
22.1,1
22.5,1
22.6,1
22.7,1
23.2,1
23.4,1
23.7,1
23.9,3
24,1
24.1,2
24.2,1
24.3,4
24.4,3
24.5,1
24.7,2
24.8,2
24.9,1
25.1,1
25.2,1
25.3,2
25.4,2
25.5,1
25.6,2
25.7,1
25.8,3
25.9,4
26,2
26.1,4
26.2,1
26.3,3
26.4,3
26.5,3
26.6,4
26.7,10
26.8,4
26.9,1
27,3
27.1,6
27.2,7
27.3,4
27.4,2
27.5,3
27.6,4
27.7,8
27.8,3
27.9,9
28,4
28.1,5
28.2,2
28.3,4
28.4,7
28.5,1
28.6,10
28.7,5
28.8,2
28.9,6
29,7
29.1,5
29.2,3
29.3,4
29.4,1
29.5,1
29.6,2
29.8,3
30,1
30.1,4
30.2,1
30.3,3
30.4,3
30.5,1
30.6,1
30.7,1
30.8,1
30.9,1
31.1,2
31.5,1
32.1,1
35.7,1
1 16.7 1
2 19.4 1
3 19.5 1
4 20.5 1
5 20.7 1
6 21 1
7 21.3 2
8 21.9 1
9 22.1 1
10 22.5 1
11 22.6 1
12 22.7 1
13 23.2 1
14 23.4 1
15 23.7 1
16 23.9 3
17 24 1
18 24.1 2
19 24.2 1
20 24.3 4
21 24.4 3
22 24.5 1
23 24.7 2
24 24.8 2
25 24.9 1
26 25.1 1
27 25.2 1
28 25.3 2
29 25.4 2
30 25.5 1
31 25.6 2
32 25.7 1
33 25.8 3
34 25.9 4
35 26 2
36 26.1 4
37 26.2 1
38 26.3 3
39 26.4 3
40 26.5 3
41 26.6 4
42 26.7 10
43 26.8 4
44 26.9 1
45 27 3
46 27.1 6
47 27.2 7
48 27.3 4
49 27.4 2
50 27.5 3
51 27.6 4
52 27.7 8
53 27.8 3
54 27.9 9
55 28 4
56 28.1 5
57 28.2 2
58 28.3 4
59 28.4 7
60 28.5 1
61 28.6 10
62 28.7 5
63 28.8 2
64 28.9 6
65 29 7
66 29.1 5
67 29.2 3
68 29.3 4
69 29.4 1
70 29.5 1
71 29.6 2
72 29.8 3
73 30 1
74 30.1 4
75 30.2 1
76 30.3 3
77 30.4 3
78 30.5 1
79 30.6 1
80 30.7 1
81 30.8 1
82 30.9 1
83 31.1 2
84 31.5 1
85 32.1 1
86 35.7 1

BIN
images/alexa.jpg Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 684 KiB

BIN
images/google-assistant.jpg Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 683 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 198 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 665 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 669 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 241 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 162 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 125 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 260 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 257 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 136 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 242 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 173 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 161 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 232 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 280 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 176 KiB

BIN
images/siri.jpg Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 822 KiB

BIN
main.pdf Normal file

Binary file not shown.

1
main.pdfpc Normal file
View File

@@ -0,0 +1 @@
{"pdfpcFormat":2,"disableMarkdown":false,"pages":[{"idx":0,"label":1,"overlay":0,"forcedOverlay":false,"hidden":false},{"idx":1,"label":2,"overlay":0,"forcedOverlay":false,"hidden":false},{"idx":2,"label":3,"overlay":0,"forcedOverlay":false,"hidden":false},{"idx":3,"label":4,"overlay":0,"forcedOverlay":false,"hidden":false,"note":"Three main VAs"},{"idx":4,"label":4,"overlay":1,"forcedOverlay":true,"hidden":false,"note":"Three main VAs"},{"idx":5,"label":5,"overlay":0,"forcedOverlay":false,"hidden":false,"note":"Why do we want to look at smart speakers?"},{"idx":6,"label":5,"overlay":1,"forcedOverlay":true,"hidden":false,"note":"Why do we want to look at smart speakers?"},{"idx":7,"label":5,"overlay":2,"forcedOverlay":true,"hidden":false,"note":"Why do we want to look at smart speakers?"},{"idx":8,"label":6,"overlay":0,"forcedOverlay":false,"hidden":false,"note":"skill squatting; specific to alexa, skills basically apps"},{"idx":9,"label":6,"overlay":1,"forcedOverlay":true,"hidden":false,"note":"skill squatting; specific to alexa, skills basically apps"},{"idx":10,"label":7,"overlay":0,"forcedOverlay":false,"hidden":false},{"idx":11,"label":8,"overlay":0,"forcedOverlay":false,"hidden":false,"note":"Just after the release of SSL 3.0"},{"idx":12,"label":8,"overlay":1,"forcedOverlay":true,"hidden":false,"note":"Just after the release of SSL 3.0"},{"idx":13,"label":8,"overlay":2,"forcedOverlay":true,"hidden":false,"note":"Just after the release of SSL 3.0"},{"idx":14,"label":9,"overlay":0,"forcedOverlay":false,"hidden":false,"note":"Relevant developments in my opinion"},{"idx":15,"label":10,"overlay":0,"forcedOverlay":false,"hidden":false,"note":"2. 
to filter traffic"},{"idx":16,"label":11,"overlay":0,"forcedOverlay":false,"hidden":false},{"idx":17,"label":12,"overlay":0,"forcedOverlay":false,"hidden":false},{"idx":18,"label":13,"overlay":0,"forcedOverlay":false,"hidden":false},{"idx":19,"label":14,"overlay":0,"forcedOverlay":false,"hidden":false},{"idx":20,"label":15,"overlay":0,"forcedOverlay":false,"hidden":false,"note":"Why? Let's look at Website Traffic Fingerprinting"},{"idx":21,"label":15,"overlay":1,"forcedOverlay":true,"hidden":false,"note":"Why? Let's look at Website Traffic Fingerprinting"},{"idx":22,"label":15,"overlay":2,"forcedOverlay":true,"hidden":false,"note":"Why? Let's look at Website Traffic Fingerprinting"},{"idx":23,"label":15,"overlay":3,"forcedOverlay":true,"hidden":false,"note":"Why? Let's look at Website Traffic Fingerprinting"},{"idx":24,"label":15,"overlay":4,"forcedOverlay":true,"hidden":false,"note":"Why? Let's look at Website Traffic Fingerprinting"},{"idx":25,"label":15,"overlay":5,"forcedOverlay":true,"hidden":false,"note":"Why? 
Let's look at Website Traffic Fingerprinting"},{"idx":26,"label":16,"overlay":0,"forcedOverlay":false,"hidden":false,"note":"Found in one of the newer papers"},{"idx":27,"label":17,"overlay":0,"forcedOverlay":false,"hidden":false},{"idx":28,"label":18,"overlay":0,"forcedOverlay":false,"hidden":false},{"idx":29,"label":19,"overlay":0,"forcedOverlay":false,"hidden":false},{"idx":30,"label":20,"overlay":0,"forcedOverlay":false,"hidden":false},{"idx":31,"label":21,"overlay":0,"forcedOverlay":false,"hidden":false,"note":"Some outliers like \"tell me a story\" at >1min"},{"idx":32,"label":22,"overlay":0,"forcedOverlay":false,"hidden":false},{"idx":33,"label":23,"overlay":0,"forcedOverlay":false,"hidden":false},{"idx":34,"label":24,"overlay":0,"forcedOverlay":false,"hidden":false},{"idx":35,"label":25,"overlay":0,"forcedOverlay":false,"hidden":false,"note":"Since I've trained the model on my laptop, I used a smaller network."},{"idx":36,"label":26,"overlay":0,"forcedOverlay":false,"hidden":false}]}

549
main.typ Normal file
View File

@@ -0,0 +1,549 @@
#import "@preview/polylux:0.3.1": *
#import "university-custom.typ": *
#import "@preview/fletcher:0.4.1" as fletcher: node, edge
#import "@preview/cetz:0.2.0"
// Colour palette (German names): mint / light mint, red, anthracite / light
// anthracite. Presumably the university's corporate colours — TODO confirm.
#let mint = rgb("a5d7d2")
#let mint-hell = rgb("d2ebe9")
#let rot = rgb("d20537")
#let anthrazit = rgb("2d373c")
#let anthrazit-hell = rgb("46505a")
// Render block quotes unchanged but italicise inline quotes.
#show quote: it => {
if it.block {
it
} else {
emph(it)
}
}
// diagrams
// Edge with a default arrow head on the target end.
#let edge_(..args) = edge(..args, marks: (none, "|>")) // we can't use set rules for user defined functions yet, so we overwrite it
// Node drawn in `colour`: coloured stroke, same colour lightened as fill.
#let group_node(colour, ..args) = node(
..args,
stroke: colour,
fill: colour.lighten(80%),
)
// Dotted ("..") edge in `colour` with a coloured text label placed to the
// left of the edge. Used to annotate a group of nodes.
#let group_edge(colour, description, ..args) = edge(
..args,
text(colour)[#description],
"..",
stroke: colour,
label-side: left,
)
// Draw a rounded rectangle behind/around a set of fletcher nodes.
// `clearance` is extra padding per side; judging by the width/height sums
// below the order is (top, right, bottom, left), CSS-style.
#let enclose_nodes(nodes, colour, clearance: (8pt, 8pt, 8pt, 8pt)) = {
let (center, size) = fletcher.bounding-rect(nodes.map(node => node.real-pos))
// Shift the centre when opposite clearances differ, so the rectangle
// stays centred on the padded bounds rather than the raw bounds.
center.at(0) = center.at(0) - (clearance.at(3) - clearance.at(1))
center.at(1) = center.at(1) - (clearance.at(2) - clearance.at(0))
cetz.draw.content(
center,
rect(
width: size.at(0) + clearance.at(1) + clearance.at(3),
height: size.at(1) + clearance.at(2) + clearance.at(0),
radius: 16pt,
stroke: colour,
fill: colour.lighten(85%),
)
)
}
// functions
// Section-divider slide: white background with a bold, centred title in the
// theme's anthracite colour.
#let focus-title-slide(title) = {
focus-slide(background-color: white)[
#align(center, text(fill: anthrazit, size: 0.7em, weight: "bold", title))
]
}
// #pdfpc.config(
// duration-minutes: 30,
// // start-time: "10:15",
// // end-time: "10:45",
// // note-font-size: 24,
// )
#show: university-theme.with(
short-title: [A Testbed for Voice Assistant Traffic Fingerprinting],
short-author: [Milan van Zanten],
color-a: anthrazit,
color-b: mint,
)
#title-slide(
title: [A Testbed for Voice Assistant \ Traffic Fingerprinting],
subtitle: [Master Thesis Presentation],
authors: [Milan van Zanten],
date: [21.03.2024],
institution-name: [University of Basel],
logo: pad(1em, image("unibas-logo.svg"))
)
#slide(title: [Outline])[
+ Voice Assistants
+ Traffic Fingerprinting
+ Testbed
+ Results
+ Demo
Ask questions any time!
]
#focus-title-slide[
A Testbed for #text(fill: rot)[Voice Assistant] \ Traffic Fingerprinting
]
#slide(title: [Devices], new-section: [Voice Assistants])[
#pdfpc.speaker-note("Three main VAs")
Specifically, #emph[Smart Speakers]
#only(1)[
#side-by-side[
#align(center)[
#image(width: 50%, "images/alexa.jpg")
Echo Dot
#emph[Amazon Alexa]
]
][
#align(center)[
#image(width: 50%, "images/siri.jpg")
HomePod Mini
#emph[Siri]
]
][
#align(center)[
#image(width: 50%, "images/google-assistant.jpg")
Google Home Mini
#emph[Google Assistant]
]
]
]
#only(2)[
#side-by-side[
#align(center)[
#image(width: 40%, "images/alexa.jpg")
#text(size: .6em)[
Echo Dot
#emph[Amazon Alexa]
]
]
][
#align(center)[
#image(width: 80%, "images/siri.jpg")
HomePod Mini
#emph[Siri]
]
][
#align(center)[
#image(width: 40%, "images/google-assistant.jpg")
#text(size: .6em)[
Google Home Mini
#emph[Google Assistant]
]
]
]
]
]
#slide(title: [Smart Speaker Privacy / Security])[
#counter(footnote).update(0) // reset footnote counter
#pdfpc.speaker-note("Why do we want to look at smart speakers?")
#pdfpc.speaker-note("Alexa; guest voice wrongly recognised 65%")
#alternatives(repeat-last: true)[
none #h(1em) `(╯°□°)╯︵ ┻━┻`
][
There are concerns...
]
#pause
- Usually located where sensitive conversations take place
- Necessarily always listening
- Misactivations
- Used to control smart home devices (e.g. door locks)
- No authentication\*
#uncover(3)[About 40% of households in the U.S. own a smart speaker.]
#v(1em)
#text(0.6em)[\* Voice recognition is still insecure.]
]
#slide(title: [Attacks on Smart Speakers])[
#pdfpc.speaker-note("skill squatting; specific to alexa, skills basically apps")
#pdfpc.speaker-note("Boil an egg is an existing skill")
Active:
- Malicious activations
- Similar pronounciations, "skill squatting"
- (e.g. "Boil an egg" $->$ "Boyle an egg")#footnote[D. Kumar et al., #quote[Skill Squatting Attacks on Amazon Alexa], August 2018, Available: #link("https://www.usenix.org/conference/usenixsecurity18/presentation/kumar")]
Passive:
- #alternatives[Traffic Fingerprinting][#text(fill: rot)[Traffic Fingerprinting]]
]
#focus-title-slide[
A Testbed for Voice Assistant \ #text(fill: rot)[Traffic Fingerprinting]
]
#slide(title: [], new-section: [Traffic Fingerprinting])[
#pdfpc.speaker-note("Just after the release of SSL 3.0")
#pdfpc.speaker-note("David Wagner and Bruce Schneier")
#pdfpc.speaker-note("Probably one of the first mentions")
#v(2em)
#quote(attribution: [Wagner and Schneier#footnote[D. Wagner and B. Schneier, #quote[Analysis of the SSL 3.0 Protocol], November 1996, Available: #link("https://dl.acm.org/doi/10.5555/1267167.1267171")]], block: true, quotes: true)[[SSL] traffic analysis aims to recover confidential information about protection sessions by examining unencrypted packet fields and #alternatives(repeat-last: true)[unprotected packet attributes][#text(fill: rot)[unprotected packet attributes]]. For example [...] the volume of network traffic flow]
#uncover(3)[... packet direction, timing, and more]
]
#slide(title: [Timeline])[
#counter(footnote).update(0) // reset footnote counter
#pdfpc.speaker-note("Relevant developments in my opinion")
#pdfpc.speaker-note("Abe and Goto; Denoising Autoencoder on Tor traffic")
#pdfpc.speaker-note("Mao et al.; time between packets")
#pdfpc.speaker-note("Ahmed, Sabir and Das; so far all attacks assumed known traffic window, allowed them to do end-to-end")
/ 1996: Wagner and Schneier#footnote[Timeline references can be found at the end of the presentation.], #text(fill: rot)[coined SSL traffic analysis]
/ 1998: Cheng and Avnur, #text(fill: rot)[website traffic analysis]
#h(4em) #text(fill: rot)[website fingerprinting (WF)...]
/ 2016: Abe and Goto, #text(fill: rot)[deep learning WF]
/ 2019: Kennedy et al., #text(fill: rot)[apply WF techniques to voice assistants (VA)]
/ 2020: Wang et al., #text(fill: rot)[deep learning VA fingerprinting]
/ 2022: Mao et al., #text(fill: rot)[temporal features]
/ 2023: Ahmed, Sabir and Das, #text(fill: rot)[invocation detection]
]
#slide(title: [Threat Model])[
#pdfpc.speaker-note("2. to filter traffic")
#align(center, text(size: 0.7em,
fletcher.diagram(
node-stroke: 1pt,
edge-stroke: 1pt,
node-corner-radius: 8pt,
edge-corner-radius: 8pt,
node-fill: rgb("eee"),
spacing: 3em,
node((-2, 0), `VA Server`),
edge(`WAN`),
node((0, 0), `Gateway`),
edge("rr", `LAN`),
edge("r", "d", "r"),
node((2, 0), `Smart Speaker`),
node((2, 1), `Other Devices`),
group_node(rot, (1, -1), `Attacker`),
edge((1, -0.4), text(fill: rot)[`Intercept`], stroke: rot),
)
))
+ The attacker can intercept traffic from smart speaker
+ The attacker knows the smart speaker address
+ The attacker knows the type of smart speaker used
+ The attacker knows the beginning and end of an interaction
]
#slide(title: [Closed-World])[
#v(2em)
- Fixed list of monitored voice commands
- Traffic is considered to come from one of the monitored commands
- Multiclass classification
Predicts which command was used.
]
#slide(title: [Open-World])[
#v(2em)
- Traffic can also come from new, unmonitored commands
- Binary classification
Predicts whether traffic is from monitored or unmonitored command.
]
#slide(title: [Combining Both Models])[
#v(2em)
#align(center, text(size: 0.7em,
fletcher.diagram(
node-stroke: 1pt,
edge-stroke: 1pt,
node-corner-radius: 8pt,
edge-corner-radius: 8pt,
node-fill: rgb("eee"),
spacing: 3em,
node((0, 0), `traffic`),
edge_(),
node((1, 0), `open-world classification`),
edge_("d", `unmonitored`),
edge_(`monitored`),
node((3, 0), `closed-world classification`),
edge_(),
node((3, 1), `prediction`),
node((1, 1), `unknown`),
)
))
]
#focus-title-slide[
A #text(fill: rot)[Testbed] for Voice Assistant \ Traffic Fingerprinting
]
#slide(title: [Comparison], new-section: [Testbed])[
#pdfpc.speaker-note("Why? Let's look at Website Traffic Fingerprinting")
#side-by-side[
Website Fingerprinting:
- Requires a large amount of data
- Data collection usually via program making requests
- Only dependent on network environment
- Fast
][
Voice Command Fingerprinting: #pause
- Requires a large amount of data #pause
- Interaction by speaking \ ~ #pause
- Hampered by environment noise \ ~ #pause
- Slow and inefficient #pause
#text(fill: rot)[$->$ Sophisticated testbed]
]
]
#slide(title: [])[
#counter(footnote).update(0) // reset footnote counter
#pdfpc.speaker-note("Found in one of the newer papers")
#v(4em)
#quote(attribution: [Mao et al.#footnote[Jianghan Mao et al., #quote[A novel model for voice command fingerprinting using deep learning], March 2022, Available: #link("https://doi.org/10.1016/j.jisa.2021.103085")]], block: true, quotes: true)[The content of voice commands may vary from date to date; therefore, more efficient data collection tools need to be developed.]
]
#slide(title: [Requirements])[
- Sound isolation
- Isolated box
- Separate speaker/microphone
- Efficiency
- Every second saved per interaction means hours saved when collecting tens of thousands of interactions
- Dynamic interaction length by listening for silence
- Robustness
- Autonomously reset VA if error occurs
- Monitoring system
]
#slide(title: [System])[
#text(size: 0.8em)[
/ `varys`: The main executable combining all modules into the final system.
/ `varys-analysis`: Analysis of data collected by varys.
/ `varys-audio`: Recording audio and the TTS and STT systems.
/ `varys-database`: Abstraction of the database system where interactions are stored.
/ `varys-network`: Collection of network traffic, writing and parsing of `.pcap` files.
]
#v(1em)
#align(center, text(size: 0.7em)[
#fletcher.diagram(
node-stroke: 1pt,
edge-stroke: 1pt,
node-corner-radius: 4pt,
edge-corner-radius: 4pt,
spacing: 1.5em,
node((0, 0), `varys`, fill: rgb("eee")),
edge_("r"),
edge_("dd", "l", "d"),
edge_("ddd"),
edge_("dd", "r", "d"),
group_node(anthrazit, (1, 0), "varys-analysis"),
edge_("d", (0.12, 1), (0.12, 2.625)),
edge_("d", (1.2, 1), (1.2, 2.625)),
group_node(anthrazit, (-1, 3), "varys-audio"),
group_node(anthrazit, (0, 3), "varys-network"),
group_node(anthrazit, (1, 3), "varys-database"),
)
])
]
#focus-title-slide[
Results
]
#slide(title: [Datasets], new-section: [Results])[
#v(2em)
\~800h, \~70'000 interactions
~
/ ` large`: 227 queries, 140 interactions each
/ ` small`: 13 queries, 2400 interactions each
/ `binary`: #quote[Call John Doe] and #quote[Call Mary Poppins], 1500 interactions each
]
#slide(title: [Efficiency])[
#pdfpc.speaker-note("Some outliers like \"tell me a story\" at >1min")
#align(center, text(size: 0.8em)[
#cetz.canvas({
import cetz.draw: *
import cetz.plot
let data = csv("csv/aggregate_average_duration.csv").map(item => {
(float(item.at(0)), int(item.at(1)))
})
let data_remaining = csv("csv/aggregate_average_remaining_duration.csv").map(item => {
(float(item.at(0)), int(item.at(1)))
})
set-style(legend: (padding: 5pt, item: (spacing: 10pt)))
plot.plot(
size: (24, 8),
axis-style: "scientific-auto",
legend: "legend.inner-north",
x-label: "average duration [s]",
x-tick-step: 2,
x-min: 0,
x-max: 36,
y-label: "queries",
y-tick-step: 1,
y-min: 0,
{
for item in data {
plot.add(
((item.at(0), 0), (..item)),
style: (stroke: 2pt + mint),
)
}
plot.add(
((100, 0), (100, 1)),
style: (stroke: 8pt + mint),
label: "Avg. Speaking Duration",
)
for item in data_remaining {
plot.add(
((item.at(0), 0), (..item)),
style: (stroke: 2pt + rot),
)
}
plot.add(
((100, 0), (100, 1)),
style: (stroke: 8pt + rot),
label: "Avg. Remaining Duration",
)
})
})
])
]
#slide(title: [Traffic Trace Examples])[
#side-by-side[
#image(width: 100%, height: 89%, "images/plots/plot-Hey Siri. Any missed calls.png")
#v(10pt, weak: true)
#quote[Any missed calls?]
][
#image(width: 100%, height: 89%, "images/plots/plot-Hey Siri. What day was 90 days ago.png")
#v(10pt, weak: true)
#quote[What day was 90 days ago?]
]
]
#slide(title: [Fingerprinting Model])[
#counter(footnote).update(0) // reset footnote counter
#text(size: 0.96em)[
Feature extraction #text(size: 0.8em)[(packet size $s in [0, 1500]$ and direction $d in {0, 1}$)]:
$$$
(s, d) -> (-1)^(d) dot s/1500 #h(0.8em)
$$$
CNN adapted from Wang et al.#footnote[Chenggang Wang et al., #quote[Fingerprinting Encrypted Voice Traffic on Smart Speakers with Deep Learning], May 2020, Available: #link("https://doi.org/10.1145/3395351.3399357")]:
]
#text(size: 0.65em)[
#fletcher.diagram(
node-stroke: 1pt,
node-fill: rgb("eee"),
edge-stroke: 1pt,
node-corner-radius: 4pt,
edge-corner-radius: 4pt,
spacing: 1.5em,
group_node(rot, (0, 0), align(center)[Input]),
edge_(),
node((1, 0), align(center)[Conv. Layer \ `[tanh]`]),
edge_(),
node((2, 0), align(center)[Dropout \ $0.1$]),
edge_(),
node((3, 0), align(center)[Global Average \ Pooling]),
edge_(),
node((4, 0), align(center)[Dense Layer \ `[elu]`]),
edge_(),
node((5, 0), align(center)[Dense Layer \ `[softmax]`]),
edge_(),
group_node(rot, (6, 0), align(center)[Output]),
render: (grid, nodes, edges, options) => {
let cnn_1 = (nodes.at(1), nodes.at(2), nodes.at(3))
cetz.canvas({
enclose_nodes(cnn_1, rgb(0, 0, 0, 50%), clearance: (34pt, 64pt, 34pt, 61pt))
fletcher.draw-diagram(grid, nodes, edges, options)
})
}
)
]
]
#slide(title: [Classification])[
#v(2em)
Accuracy on test sets:
/ ` large`: \~40.40% (random choice \~0.44%)
/ ` small`: \~86.19% (random choice \~7.69%)
/ `binary`: \~71.19% (random choice 50%)
]
#slide(title: [Demo], new-section: [])[
#pdfpc.speaker-note("Since I've trained the model on my laptop, I used a smaller network.")
#text(size: 0.66em)[
```sh
./target/release/varys -i ap1 analyse demo data/ml/test_5_13\ queries_0.86 f4:34:f0:89:2d:75
```
]
#quote[Hey Siri, any missed calls?]
#quote[Hey Siri, remind me to wash the car.]
#v(1em)
#text(size: 0.66em)[It is unlikely this will work...]
]
#slide(header: pad(left: 1em, top: 0.5em, heading[Timeline References]))[
<references>
#text(size: 0.7em)[
- D. Wagner and B. Schneier, #quote[Analysis of the SSL 3.0 Protocol], November 1996, Available: #link("https://dl.acm.org/doi/10.5555/1267167.1267171")
- H. Cheng and R. Avnur, #quote[Traffic Analysis of SSL Encrypted Web Browsing], 1998
- K. Abe and S. Goto, #quote[Fingerprinting Attack on Tor Anonymity using Deep Learning], August 2016, Available: #link("https://core.ac.uk/display/229876143")
- S. Kennedy et al., #quote[I Can Hear Your Alexa: Voice Command Fingerprinting on Smart Home Speakers], June 2019, Available: #link("https://doi.org/10.1109/CNS.2019.8802686")
- Chenggang Wang et al., #quote[Fingerprinting Encrypted Voice Traffic on Smart Speakers with Deep Learning], May 2020, Available: #link("https://doi.org/10.1145/3395351.3399357")
- Jianghan Mao et al., #quote[A novel model for voice command fingerprinting using deep learning], March 2022, Available: #link("https://doi.org/10.1016/j.jisa.2021.103085")
- D. Ahmed, A. Sabir, and A. Das, #quote[Spying through Your Voice Assistants: Realistic Voice Command Fingerprinting], August 2023, Available: #link("https://www.usenix.org/conference/usenixsecurity23/presentation/ahmed-dilawer")
]
]

20
unibas-logo.svg Normal file

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 20 KiB

249
university-custom.typ Normal file
View File

@@ -0,0 +1,249 @@
#import "@preview/polylux:0.3.1": logic, utils
// University theme
//
// Originally contributed by Pol Dellaiera - https://github.com/drupol
//
// Please feel free to improve this theme
// by submitting a PR in https://github.com/andreasKroepelin/typst-slides
//
// Adapted by https://github.com/m-vz
// Shared presentation state: written once by `university-theme` and read by
// the slide functions below via `locate`/`state.at`.
#let uni-colors = state("uni-colors", (:))
#let uni-short-title = state("uni-short-title", none)
#let uni-short-author = state("uni-short-author", none)
#let uni-short-date = state("uni-short-date", none)
#let uni-progress-bar = state("uni-progress-bar", true)
// Theme entry point, applied with `#show: university-theme.with(...)`.
// Sets up a borderless presentation page, stores the palette and the short
// header/footer strings in state, then yields the document body.
// color-a: primary colour, color-b: accent colour, color-c: header fill.
#let university-theme(
aspect-ratio: "16-9",
short-title: none,
short-author: none,
short-date: none,
color-a: rgb("#0C6291"),
color-b: rgb("#A63446"),
color-c: rgb("#FBFEF9"),
progress-bar: true,
body
) = {
set page(
paper: "presentation-" + aspect-ratio,
margin: 0em,
header: none,
footer: none,
)
set text(size: 25pt)
show footnote.entry: set text(size: .6em)
// Persist the configuration in state so each slide can read it later.
uni-progress-bar.update(progress-bar)
uni-colors.update((a: color-a, b: color-b, c: color-c))
uni-short-title.update(short-title)
uni-short-author.update(short-author)
uni-short-date.update(short-date)
body
}
// Title slide: optional logo top-right, then centred title, subtitle,
// author grid (at most 3 columns), institution and date.
#let title-slide(
title: [],
subtitle: none,
authors: (),
institution-name: "University",
date: none,
logo: none,
) = {
// Accept a single author as well as an array of authors.
let authors = if type(authors) == "array" { authors } else { (authors,) }
let content = locate( loc => {
let colors = uni-colors.at(loc)
if logo != none {
align(right, logo)
}
align(center + horizon, {
block(
inset: 0em,
breakable: false,
{
text(size: 1.5em, fill: colors.a, strong(title))
if subtitle != none {
parbreak()
text(size: 1em, fill: colors.a, subtitle)
}
}
)
set text(size: .8em)
// Lay the authors out in up to three equal-width columns.
grid(
columns: (1fr,) * calc.min(authors.len(), 3),
column-gutter: 1em,
row-gutter: 1em,
..authors.map(author => text(fill: black, author))
)
v(1em)
if institution-name != none {
parbreak()
text(size: .9em, institution-name)
}
if date != none {
parbreak()
text(size: .8em, date)
}
})
})
logic.polylux-slide(content)
}
// Standard content slide. Builds a two-colour progress bar, a header (either
// the given `header`, or a title/section banner), and a footer (either the
// given `footer`, or the default author/title/date/page-number strip), then
// hands the padded body to polylux.
#let slide(
title: none,
header: none,
footer: none,
new-section: none,
body
) = {
let body = pad(x: 2em, y: 1em, body)
// 2pt progress bar: colour `a` for the elapsed fraction, `b` for the rest.
// Rendered only while `uni-progress-bar` is true.
let progress-barline = locate( loc => {
if uni-progress-bar.at(loc) {
let cell = block.with( width: 100%, height: 100%, above: 0pt, below: 0pt, breakable: false )
let colors = uni-colors.at(loc)
utils.polylux-progress( ratio => {
grid(
rows: 2pt, columns: (ratio * 100%, 1fr),
cell(fill: colors.a),
cell(fill: colors.b)
)
})
} else { [] }
})
// Header precedence: explicit `header` > title banner (which also registers
// `new-section` with polylux, if given) > empty.
let header-text = {
if header != none {
header
} else if title != none {
if new-section != none {
utils.register-section(new-section)
}
locate( loc => {
let colors = uni-colors.at(loc)
block(fill: colors.c, inset: (x: 1em, y: 0.5em), grid(
columns: (60%, 40%),
align(top + left, heading(level: 2, text(fill: colors.a, title))),
align(top + right, text(weight: "bold", fill: colors.a.lighten(65%), utils.current-section))
))
})
} else { [] }
}
let header = {
set align(top)
grid(rows: (auto, auto), row-gutter: 5mm, progress-barline, header-text)
}
// Footer: explicit `footer` or the default strip showing short author,
// short title, short date and "current / total" slide numbers.
let footer = {
set text(size: 10pt)
set align(center + bottom)
let cell(fill: none, it) = rect(
width: 100%, height: 100%, inset: 1mm, outset: 0mm, fill: fill, stroke: none,
align(horizon, text(fill: white, it))
)
if footer != none {
footer
} else {
locate( loc => {
let colors = uni-colors.at(loc)
show: block.with(width: 100%, height: auto, fill: colors.b)
grid(
columns: (25%, 1fr, 15%, 10%),
rows: (1.5em, auto),
cell(fill: colors.a, uni-short-author.display()),
cell(text(fill: black, uni-short-title.display())),
cell(text(fill: black, uni-short-date.display())),
cell(text(fill: black, logic.logical-slide.display() + [~/~] + utils.last-slide-number))
)
})
}
}
set page(
margin: ( top: 3em, bottom: 1em, x: 0em ),
header: header,
footer: footer,
footer-descent: 0em,
header-ascent: .6em,
)
logic.polylux-slide(body)
}
// Full-page emphasis slide with large white text, over either a solid
// background colour or a stretched background image.
#let focus-slide(background-color: none, background-img: none, body) = {
// Default to the theme blue only when neither a colour nor an image was
// supplied; an explicit image suppresses the colour default.
let background-color = if background-img == none and background-color == none {
rgb("#0C6291")
} else {
background-color
}
set page(fill: background-color, margin: 1em) if background-color != none
set page(
background: {
set image(fit: "stretch", width: 100%, height: 100%)
background-img
},
margin: 1em,
) if background-img != none
set text(fill: white, size: 2em)
logic.polylux-slide(align(horizon, body))
}
// Slide that tiles its content arguments into a checkerboard grid.
// `columns`/`rows` may each be an integer (that many equal tracks), none
// (derived from the number of bodies), or an explicit track list.
#let matrix-slide(columns: none, rows: none, ..bodies) = {
let bodies = bodies.pos()
let columns = if type(columns) == "integer" {
(1fr,) * columns
} else if columns == none {
// One column per body by default.
(1fr,) * bodies.len()
} else {
columns
}
let num-cols = columns.len()
let rows = if type(rows) == "integer" {
(1fr,) * rows
} else if rows == none {
// Just enough rows to fit all bodies: ceil(len / num-cols).
let quotient = calc.quo(bodies.len(), num-cols)
let correction = if calc.rem(bodies.len(), num-cols) == 0 { 0 } else { 1 }
(1fr,) * (quotient + correction)
} else {
rows
}
let num-rows = rows.len()
// Explicit track lists may be too small for the content; fail loudly.
if num-rows * num-cols < bodies.len() {
panic("number of rows (" + str(num-rows) + ") * number of columns (" + str(num-cols) + ") must at least be number of content arguments (" + str(bodies.len()) + ")")
}
// Map a flat body index to its (row, column) cell.
let cart-idx(i) = (calc.quo(i, num-cols), calc.rem(i, num-cols))
// Alternate white/silver fills in a checkerboard pattern.
let color-body(idx-body) = {
let (idx, body) = idx-body
let (row, col) = cart-idx(idx)
let color = if calc.even(row + col) { white } else { silver }
set align(center + horizon)
rect(inset: .5em, width: 100%, height: 100%, fill: color, body)
}
let content = grid(
columns: columns, rows: rows,
gutter: 0pt,
..bodies.enumerate().map(color-body)
)
logic.polylux-slide(content)
}