Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
64 changes: 64 additions & 0 deletions Week3/assignment/3.1-normalization.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,64 @@
# 3.1 — SQL Normalization

## 1. What violates 1NF?

- `food_code = "C1, C2"` → multiple values in a single column.
- `food_description = "Curry, Cake"` → same issue.
- `dinner_date` → inconsistent formats (`2020-03-15`, `20-03-2020`, `Mar 25 '20`). It should always be stored as `DATE`.

## 2. What entities can be extracted?

- **Members** (club members)
- **Dinners** (events)
- **Venues** (locations)
- **Foods** (dishes)

Relationships:

- Members ↔ Dinners (many-to-many, attendance)
- Dinners ↔ Foods (many-to-many, menu)
- Dinners → Venues (many-to-one)

## 3. 3NF-compliant design

```sql
-- Members table
CREATE TABLE members (
member_id INTEGER PRIMARY KEY,
member_name TEXT NOT NULL,
member_address TEXT
);

-- Venues table
CREATE TABLE venues (
venue_code TEXT PRIMARY KEY,
venue_description TEXT NOT NULL
);

-- Dinners table
CREATE TABLE dinners (
dinner_id TEXT PRIMARY KEY, -- e.g. D00001001
dinner_date DATE NOT NULL,
venue_code TEXT NOT NULL REFERENCES venues(venue_code)
);

-- Foods table
CREATE TABLE foods (
food_code TEXT PRIMARY KEY, -- e.g. C1, S1, P1
food_description TEXT NOT NULL
);

-- Attendance table (members attending dinners, many-to-many)
CREATE TABLE dinner_attendance (
dinner_id TEXT NOT NULL REFERENCES dinners(dinner_id) ON DELETE CASCADE,
member_id INTEGER NOT NULL REFERENCES members(member_id) ON DELETE CASCADE,
PRIMARY KEY (dinner_id, member_id)
);

-- Dinner-foods table (foods served at each dinner, many-to-many)
CREATE TABLE dinner_foods (
dinner_id TEXT NOT NULL REFERENCES dinners(dinner_id) ON DELETE CASCADE,
food_code TEXT NOT NULL REFERENCES foods(food_code) ON DELETE RESTRICT,
PRIMARY KEY (dinner_id, food_code)
);
```
75 changes: 75 additions & 0 deletions Week3/assignment/3.2/transaction.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,75 @@
import dotenv from "dotenv";
import pg from "pg";
dotenv.config();

const { Client } = pg;

// Connection settings come from the environment (.env loaded by dotenv);
// the database name falls back to "research" when PGDATABASE is unset.
const connectionSettings = {
  host: process.env.PGHOST,
  port: process.env.PGPORT,
  user: process.env.PGUSER,
  password: process.env.PGPASSWORD,
  database: process.env.PGDATABASE || "research",
};

const db = new Client(connectionSettings);

/**
 * Atomically moves `amount` from one account to another.
 *
 * Runs a single transaction that locks both account rows, checks the
 * source balance, applies both balance updates, and writes one audit
 * record per side in `account_changes`. On any failure the whole
 * transaction is rolled back and the error is rethrown, so balances
 * and the audit log can never diverge.
 *
 * @param {number} fromAcc - source account_number
 * @param {number} toAcc - destination account_number
 * @param {number} amount - positive amount to transfer
 * @throws {Error} on invalid amount, same-account transfer,
 *   unknown account, or insufficient funds
 */
async function transfer(fromAcc, toAcc, amount) {
  // Validate before opening a transaction: a negative amount would
  // silently reverse the transfer direction.
  if (!Number.isFinite(amount) || amount <= 0) {
    throw new Error("Transfer amount must be a positive number");
  }
  if (fromAcc === toAcc) {
    throw new Error("Cannot transfer an amount to the same account");
  }

  await db.query("BEGIN");
  try {
    // 1) Lock both rows in a deterministic order (lowest account number
    //    first) so two concurrent opposite transfers (A->B and B->A)
    //    cannot deadlock waiting on each other's row locks.
    const lockOrder = [fromAcc, toAcc].sort((a, b) => a - b);
    const balances = new Map();
    for (const acc of lockOrder) {
      const res = await db.query(
        "SELECT balance FROM account WHERE account_number = $1 FOR UPDATE",
        [acc]
      );
      if (res.rowCount === 0) {
        throw new Error("Account not found");
      }
      balances.set(acc, Number(res.rows[0].balance));
    }

    if (balances.get(fromAcc) < amount) {
      throw new Error("Insufficient funds");
    }

    // 2) Update balances
    await db.query(
      "UPDATE account SET balance = balance - $1 WHERE account_number = $2",
      [amount, fromAcc]
    );
    await db.query(
      "UPDATE account SET balance = balance + $1 WHERE account_number = $2",
      [amount, toAcc]
    );

    // 3) Log both sides of the transfer in the audit table
    await db.query(
      "INSERT INTO account_changes(account_number, amount, remark) VALUES ($1, $2, $3)",
      [fromAcc, -amount, `transfer to ${toAcc}`]
    );
    await db.query(
      "INSERT INTO account_changes(account_number, amount, remark) VALUES ($1, $2, $3)",
      [toAcc, amount, `transfer from ${fromAcc}`]
    );

    await db.query("COMMIT");
    console.log(` Transferred ${amount} from ${fromAcc} to ${toAcc}`);
  } catch (e) {
    // Any failure aborts the whole transaction, then the error is
    // surfaced to the caller.
    await db.query("ROLLBACK");
    throw e;
  }
}

// Entry point: open the connection, perform one demo transfer, and make
// sure the connection is closed again no matter what happens.
const run = async () => {
  await db.connect();
  try {
    await transfer(101, 102, 1000.0);
  } finally {
    await db.end();
  }
};

run().catch((e) => {
  console.error("Mistake", e.message);
  process.exit(1);
});
45 changes: 45 additions & 0 deletions Week3/assignment/3.2/transactions-create-tables.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
import dotenv from "dotenv";
import pg from "pg";
dotenv.config();
const { Client } = pg;

// Connection settings come from the environment (.env loaded by dotenv);
// the database name falls back to "research" when PGDATABASE is unset.
const connectionSettings = {
  host: process.env.PGHOST,
  port: process.env.PGPORT,
  user: process.env.PGUSER,
  password: process.env.PGPASSWORD,
  database: process.env.PGDATABASE || "research",
};

const db = new Client(connectionSettings);

// (Re)creates the two demo tables from scratch: the parent `account`
// table and the `account_changes` audit log that references it.
async function run() {
  await db.connect();
  try {
    // Drops run child-before-parent, creates run parent-before-child,
    // so the FK from account_changes to account is always satisfiable.
    const statements = [
      `DROP TABLE IF EXISTS account_changes;`,
      `DROP TABLE IF EXISTS account;`,
      `CREATE TABLE account (
        account_number INTEGER PRIMARY KEY,
        balance NUMERIC(12,2) NOT NULL CHECK (balance >= 0)
      );`,
      `CREATE TABLE account_changes (
        change_number BIGSERIAL PRIMARY KEY,
        account_number INTEGER NOT NULL REFERENCES account(account_number) ON DELETE CASCADE,
        amount NUMERIC(12,2) NOT NULL,
        changed_date TIMESTAMPTZ NOT NULL DEFAULT now(),
        remark TEXT NOT NULL
      );`,
    ];
    for (const sql of statements) {
      await db.query(sql);
    }
    console.log("tables created");
  } finally {
    await db.end();
  }
}

run().catch((e) => {
  console.error(e);
  process.exit(1);
});
49 changes: 49 additions & 0 deletions Week3/assignment/3.2/transactions-insert-values.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
// Week3/assignment/3.2/transactions-insert-values.js
// Inserts test accounts and initial seed transactions.

import dotenv from "dotenv";
import pg from "pg";
dotenv.config();

const { Client } = pg;

// Connection settings come from the environment (.env loaded by dotenv);
// the database name falls back to "research" when PGDATABASE is unset.
const connectionSettings = {
  host: process.env.PGHOST,
  port: process.env.PGPORT,
  user: process.env.PGUSER,
  password: process.env.PGPASSWORD,
  database: process.env.PGDATABASE || "research",
};

const db = new Client(connectionSettings);

/**
 * Seeds the demo data: three accounts plus one "initial funding" audit
 * row per account. The reset and both inserts run inside a single
 * transaction, so a failure part-way through can never leave the
 * database half-seeded.
 */
async function run() {
  await db.connect();
  try {
    await db.query("BEGIN");
    try {
      // Reset child + parent together and restart the BIGSERIAL counter.
      await db.query(`TRUNCATE account_changes, account RESTART IDENTITY;`);

      // Insert accounts
      await db.query(`
        INSERT INTO account(account_number, balance) VALUES
        (101, 5000.00),
        (102, 2000.00),
        (103, 150.00);
      `);

      // Insert matching account_changes log rows
      await db.query(`
        INSERT INTO account_changes(account_number, amount, remark) VALUES
        (101, 5000.00, 'initial funding'),
        (102, 2000.00, 'initial funding'),
        (103, 150.00, 'initial funding');
      `);

      await db.query("COMMIT");
    } catch (e) {
      // Undo the partial seed, then surface the original error.
      await db.query("ROLLBACK");
      throw e;
    }

    console.log("✅ Accounts and seed transactions inserted");
  } finally {
    await db.end();
  }
}

run().catch((e) => {
  console.error("Mistake", e.message);
  process.exit(1);
});
46 changes: 46 additions & 0 deletions Week3/assignment/3.3-sql-injection.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
import dotenv from "dotenv";
import pg from "pg";
dotenv.config();

const { Client } = pg;

// Credentials come from the environment (.env via dotenv); the database
// is fixed to "world" because the demo queries its `country` table.
const connectionSettings = {
  host: process.env.PGHOST,
  port: process.env.PGPORT,
  user: process.env.PGUSER,
  password: process.env.PGPASSWORD,
  database: "world" // ← world DB with country table
};

const db = new Client(connectionSettings);

/**
 * ❌ DEMO ONLY — intentionally vulnerable to SQL injection.
 * User input is spliced straight into the SQL text, so a crafted
 * `name`/`code` value can rewrite the query's WHERE clause.
 */
async function vulnerableGetPopulation(name, code) {
  const query = `SELECT population FROM country WHERE name = '${name}' AND code = '${code}'`;
  const result = await db.query(query);
  return result.rows;
}

/**
 * ✅ Safe variant: the values travel as bind parameters ($1/$2), so the
 * driver never lets user input alter the query structure.
 */
async function safeGetPopulation(name, code) {
  const text = `SELECT population FROM country WHERE name = $1 AND code = $2`;
  const result = await db.query(text, [name, code]);
  return result.rows;
}

// Side-by-side demonstration: the injected payload makes the vulnerable
// query match every row, while the parameterized query returns only the
// requested country.
async function demo() {
  await db.connect();

  const payload = `' OR '1'='1`;
  const hacked = await vulnerableGetPopulation(payload, payload);
  console.log(" Injection result (all rows):", hacked.length);

  const normal = await safeGetPopulation("Netherlands", "NLD");
  console.log(" Safe query result:", normal);

  await db.end();
}

demo().catch(e => {
  console.error(e);
  process.exit(1);
});
55 changes: 55 additions & 0 deletions Week3/assignment/mongo/index.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
import dotenv from "dotenv";
import { MongoClient } from "mongodb";
dotenv.config();

// Connection settings from the environment, with fallbacks for the
// database and collection names (the URI has no sensible default).
const { MONGODB_URI, MONGODB_DB, MONGODB_COLLECTION } = process.env;

const uri = MONGODB_URI;
const dbName = MONGODB_DB || "databaseWeek3";
const collName = MONGODB_COLLECTION || "bob_ross_episodes";

/**
 * Demonstrates the four CRUD operations against the Bob Ross episodes
 * collection: insertOne, find, updateMany, and deleteOne. The client
 * connection is always closed, even when one of the steps fails.
 */
async function run() {
  // Fail fast with a clear message instead of the cryptic driver error
  // that `new MongoClient(undefined)` would otherwise produce.
  if (!uri) {
    throw new Error("MONGODB_URI is not set — add it to your .env file");
  }

  // 1) Connect
  const client = new MongoClient(uri);
  await client.connect();

  try {
    const db = client.db(dbName);
    const coll = db.collection(collName);

    // 2) C — Create (insertOne)
    const demoDoc = {
      title: "Demo Episode",
      elements: ["mountain", "tree", "lake"],
      season: 99,
      episode: 1,
    };
    const insertRes = await coll.insertOne(demoDoc);
    console.log("Inserted ID:", insertRes.insertedId.toString());

    // 3) R — Read (find): first five episodes containing a mountain
    const mountains = await coll
      .find({ elements: "mountain" })
      .project({ title: 1, season: 1, episode: 1, _id: 0 })
      .limit(5)
      .toArray();
    console.log("Sample with 'mountain':", mountains);

    // 4) U — Update (updateMany): flag every season >= 20 document
    const upd = await coll.updateMany(
      { season: { $gte: 20 } },
      { $set: { hasVintage: true } }
    );
    console.log("Updated docs:", upd.modifiedCount);

    // 5) D — Delete (deleteOne) — remove the demo document again
    const del = await coll.deleteOne({ _id: insertRes.insertedId });
    console.log("Deleted demo:", del.deletedCount);
  } finally {
    // 6) Close
    await client.close();
  }
}

run().catch((e) => {
  console.error("Mongo error:", e.message);
  process.exit(1);
});
Loading