Skip to content

Commit 6f588b9

Browse files
Analyse autumn 2025
1 parent 5e6630b commit 6f588b9

File tree

4 files changed

+127
-0
lines changed

4 files changed

+127
-0
lines changed
Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1 +1,9 @@
11
# Evaluation 2025-11
2+
3+
- [analyse.R](analyse.R)
4+
5+
## [Analysis](analyse.R)
6+
7+
- The same data as a table: [confidences_course.csv](confidences_course.csv)
8+
9+
![Confidences per question](confidences_per_question.png)
Lines changed: 83 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,83 @@
#!/usr/bin/env Rscript
3+
#' Read the day-1 average confidences from the sibling
#' 'naiss_intro_python' repository checkout.
#'
#' @return The CSV contents as a tibble, one row per learning outcome.
read_day_1_confidences <- function() {
  # Fixed: the path contained a doubled slash ('20251127//average_confidences.csv').
  readr::read_csv(
    "../../../naiss_intro_python/docs/evaluations/20251127/average_confidences.csv",
    show_col_types = FALSE
  )
}
6+
#' Read the day-2 average confidences from the neighbouring evaluation folder.
#'
#' @return The CSV contents as a tibble, one row per learning outcome.
read_day_2_confidences <- function() {
  csv_path <- file.path("..", "20251128_day_2", "average_confidences.csv")
  readr::read_csv(csv_path, show_col_types = FALSE)
}
9+
#' Read the day-3 average confidences from the neighbouring evaluation folder.
#'
#' @return The CSV contents as a tibble, one row per learning outcome.
read_day_3_confidences <- function() {
  csv_path <- file.path("..", "20251201_day_3", "average_confidences.csv")
  readr::read_csv(csv_path, show_col_types = FALSE)
}
12+
#' Read the day-4 average confidences from the neighbouring evaluation folder.
#'
#' @return The CSV contents as a tibble, one row per learning outcome.
read_day_4_confidences <- function() {
  csv_path <- file.path("..", "20251202_day_4", "average_confidences.csv")
  readr::read_csv(csv_path, show_col_types = FALSE)
}
15+
16+
#' Day-1 confidences with standardized column names and a 'day' column.
#' NOTE(review): 'condidences' looks like a typo for 'confidences'; the name
#' is kept unchanged because the rest of the script calls it as-is.
get_day_1_condidences <- function() {
  confidences <- read_day_1_confidences()
  colnames(confidences) <- c("learning_outcomes", "average_confidence")
  confidences$day <- 1
  confidences
}
22+
#' Day-2 confidences with standardized column names and a 'day' column.
#' NOTE(review): 'condidences' looks like a typo for 'confidences'; the name
#' is kept unchanged because the rest of the script calls it as-is.
get_day_2_condidences <- function() {
  confidences <- read_day_2_confidences()
  colnames(confidences) <- c("learning_outcomes", "average_confidence")
  confidences$day <- 2
  confidences
}
28+
#' Day-3 confidences with standardized column names and a 'day' column.
#' NOTE(review): 'condidences' looks like a typo for 'confidences'; the name
#' is kept unchanged because the rest of the script calls it as-is.
get_day_3_condidences <- function() {
  confidences <- read_day_3_confidences()
  colnames(confidences) <- c("learning_outcomes", "average_confidence")
  confidences$day <- 3
  confidences
}
34+
#' Day-4 confidences with standardized column names and a 'day' column.
#' NOTE(review): 'condidences' looks like a typo for 'confidences'; the name
#' is kept unchanged because the rest of the script calls it as-is.
get_day_4_condidences <- function() {
  confidences <- read_day_4_confidences()
  colnames(confidences) <- c("learning_outcomes", "average_confidence")
  confidences$day <- 4
  confidences
}
40+
#' Stack all four days of confidences into one long data frame with
#' columns 'learning_outcomes', 'average_confidence' and 'day'.
#' NOTE(review): 'condidences' looks like a typo for 'confidences'; the name
#' is kept unchanged because the script below calls it as-is.
get_condidences <- function() {
  per_day <- list(
    get_day_1_condidences(),
    get_day_2_condidences(),
    get_day_3_condidences(),
    get_day_4_condidences()
  )
  dplyr::bind_rows(per_day)
}
48+
49+
# Build the combined table: one row per (learning outcome, day).
t <- get_condidences()
# Day as a factor so ggplot2 gives each course day its own discrete fill colour.
t$day <- as.factor(t$day)
# Sort rows from least to most confident.
t <- t |> dplyr::arrange(average_confidence)
t$learning_outcomes <- as.factor(t$learning_outcomes)
# Reorder the factor levels so the bar chart lists outcomes with the highest
# average confidence at the top and the lowest at the bottom.
# NOTE(review): X = order(...) relies on the rows already being sorted by
# average_confidence and on each learning outcome occurring exactly once —
# confirm this still holds if the input data ever gains duplicate outcomes.
t$learning_outcomes <- reorder(
  x = t$learning_outcomes,
  X = order(t$average_confidence),
  decreasing = TRUE
)
# Persist the tidy table next to the plot, linked from the README.
readr::write_csv(t, "confidences_course.csv")
59+
60+
# Course-wide mean of the per-question average confidences; drawn as a
# dashed reference line in the plot and quoted in the caption.
average_average_confidence <- mean(t$average_confidence)

# Fixed: the plot was a bare top-level expression, so running this file with
# Rscript auto-printed it to the default device and left a stray 'Rplots.pdf'.
# Assign the plot and hand it to ggsave() explicitly instead.
confidences_plot <- ggplot2::ggplot(
  t,
  ggplot2::aes(
    x = average_confidence,
    y = learning_outcomes,
    fill = day
  )
) +
  ggplot2::geom_col() +
  ggplot2::geom_vline(xintercept = average_average_confidence, lty = "dashed") +
  ggplot2::theme(
    strip.text.y = ggplot2::element_text(angle = 0),
    legend.position = "bottom",
    axis.text = ggplot2::element_text(size = 7)
  ) +
  ggplot2::labs(
    title = "Confidences per question",
    caption = paste0(
      " Dashed line denotes the average at ",
      round(average_average_confidence, digits = 2)
    )
  )

ggplot2::ggsave(
  filename = "confidences_per_question.png",
  plot = confidences_plot,
  width = 7,
  height = 7
)
Lines changed: 36 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,36 @@
1+
learning_outcomes,average_confidence,day
2+
I can determine which ML and DL modules are installed at a center,2,4
3+
I can run a Python code that uses PyTorch,2.25,4
4+
I can run a Python code that uses TensorFlow,2.5,4
5+
I am comfortable using a Python book,2.6666666666666665,1
6+
I can write a batch script that uses GPUs,2.75,4
7+
I know the basics of using the different parallelization tools in Python,2.75,4
8+
I can make a plot with Seaborn,2.8,3
9+
"I can use Pandas (basic operations - statistics, binary operators, vectorized math, string methods)",2.8,3
10+
"I can use Pandas (inspect, clean, and sort data for later operations)",2.8,3
11+
I can load a Python (machine learning) module,3,4
12+
I know how a simple for loop can be parallelized with Multiprocessing and how it can run on multiple cores,3,4
13+
I can use Pandas (input/output Pandas data),3.2,3
14+
I can write a batch script,3.4,3
15+
I can check that I am in an interactive session,3.5,2
16+
I can decide on useful file formats for big data,3.6,3
17+
I can use a text editor on my HPC cluster,3.6666666666666665,1
18+
I can use the compute nodes interactively,3.75,2
19+
I can submit a script to the job scheduler,3.8,3
20+
I am comfortable using the documentation of my HPC center,4,1
21+
"I can work (create, activate, work, deactivate) with a venv virtual environment",4,2
22+
I can create plots in matplotlib,4,3
23+
I can determine if a Python package is installed,4.25,2
24+
I can use a Python IDE,4.25,2
25+
I can describe what the Python interpreter is,4.333333333333333,1
26+
I can run a Python script that uses a graphical library on an HPC cluster,4.333333333333333,1
27+
I can find and load a Python package module,4.5,2
28+
I can install Python packages,4.5,2
29+
I can read and write to/from a file in Python,4.666666666666667,1
30+
I am comfortable learning Python,5,1
31+
I can convert a simple equation to Python code,5,1
32+
I can convert a simple text question to Python code,5,1
33+
I can create a Python script,5,1
34+
I can create and use a variable in Python,5,1
35+
I can load a Python version on my HPC cluster,5,1
36+
I can run a Python script,5,1
283 KB
Loading

0 commit comments

Comments
 (0)