diff --git a/cli/src/api.rs b/cli/src/api.rs
index 11a8b7e3..650de091 100644
--- a/cli/src/api.rs
+++ b/cli/src/api.rs
@@ -146,6 +146,17 @@ pub async fn delete_server_data(
     }
 }
 
+pub async fn refresh_views(http_client: &Client, base_url: &str, dry_run: bool) -> Result<()> {
+    let url = format!("{}/api/v1/admin/refresh-views", base_url);
+    if dry_run {
+        println!("Would refresh views.");
+        Ok(())
+    } else {
+        let response = http_client.post(url).send().await;
+        server_response(response).await.map(|_| println!("OK"))
+    }
+}
+
 pub async fn find_metabolite(http_client: &Client, base_url: &str, query: &str) -> Result<()> {
     let url = format!("{}/api/v1/metabolites/?limit=1&query={}", base_url, query);
     let res = http_client.get(url).send().await;
diff --git a/cli/src/main.rs b/cli/src/main.rs
index a23ab63e..6a4234f0 100644
--- a/cli/src/main.rs
+++ b/cli/src/main.rs
@@ -10,7 +10,7 @@ use clap::{Args, Parser, Subcommand};
 use reqwest::{redirect::Policy, ClientBuilder};
 
 use crate::api::{
-    delete_server_data, find_metabolite, find_path, get_server_state, get_user_info,
+    delete_server_data, find_metabolite, find_path, get_server_state, get_user_info, refresh_views,
     set_server_state, submit_all, submit_biocyc, submit_labellings,
 };
 use crate::data_sources::DataSources;
@@ -133,6 +133,8 @@ enum AdminCommand {
         #[arg(value_parser = clap::builder::PossibleValuesParser::new(["all", "biocyc", "labellings"]))]
         what: String,
     },
+
+    RefreshViews,
 }
 
 #[derive(Debug, Args)]
@@ -228,6 +230,14 @@ async fn main() -> Result<()> {
                 )
                 .await
             }
+            AdminCommand::RefreshViews => {
+                refresh_views(
+                    &client,
+                    &args.global_opts.base_url,
+                    args.global_opts.dry_run,
+                )
+                .await
+            }
         },
         Command::Query(QueryArgs {
             command: QueryCommand::Metabolite { query },
diff --git a/server/alembic.ini b/server/alembic.ini
index a0f547c6..890ba88c 100644
--- a/server/alembic.ini
+++ b/server/alembic.ini
@@ -76,7 +76,7 @@ sqlalchemy.url = driver://user:pass@localhost/dbname
 
 # Logging configuration
 [loggers]
-keys = root,sqlalchemy,alembic
+keys = root,sqlalchemy,alembic,alembic_utils
 
 [handlers]
 keys = console
@@ -99,6 +99,11 @@ level = INFO
 handlers =
 qualname = alembic
 
+[logger_alembic_utils]
+level = INFO
+handlers =
+qualname = alembic_utils
+
 [handler_console]
 class = StreamHandler
 args = (sys.stderr,)
diff --git a/server/dries/TTFD.py b/server/dries/TTFD.py
index 017f0d68..8364168e 100644
--- a/server/dries/TTFD.py
+++ b/server/dries/TTFD.py
@@ -68,7 +68,7 @@
 
 # include_labeled_elements = []
 #include_labeled_elements = ['x_x____']
-exclude_labeled_elements = []
+exclude_labeled_elements = None
 """
 exclude_labeled_elements = ['x__________________________________________________',
                             '___________x_______________________________________',
@@ -89,6 +89,11 @@ pathway_id2name = {'PWY66-398': 'TCA cycle'}
 
 
+def trace(idx: int, msg: str) -> None:
+    # if idx in (19,):
+    #     print(f"{idx}: {msg}")
+    pass
+
 def do_the_thing(data,
                  tracer_metabolite_id,
@@ -98,14 +103,13 @@ def do_the_thing(data,
                  num_pathways_filter=None,
                  include_labeled_elements=None,
                  include_labeled_elements_origins=None):
-    if include_labeled_elements is None:
-        include_labeled_elements = []
+    # print(f"{gauge_num_label_filter=}\n{num_pathways_filter=}\n{include_labeled_elements=}\n{include_labeled_elements_origins=}", flush=True)
 
     #i_f = open("data/ALPHA-GLUCOSE__L-LACTATE__30.newformat", 'r')
     tracer_labeled_atoms = [str(tla) for tla in tracer_labeled_atoms]
     path_list = []
     results = []
-    for (idx, datum) in enumerate(data):
+    for (idx, datum) in enumerate(data, start=1):
         reaction_info = datum.reaction_info
         compressed_reaction_path = reaction_info[0]
         reduced_reaction_list = reaction_info[1]
@@ -138,6 +142,7 @@ def do_the_thing(data,
             emi_list = emi.split('_')
             emi_list = [l if l in tracer_labeled_atoms else '' for l in emi_list]
             label_positions = ['x' if l != '' else '' for l in emi_list]
+            trace(idx, f"{emi} -> {label_positions}")
             label_positions_txt = '_'.join(label_positions)
             emi_filtered = '_'.join(emi_list)
             if label_positions_txt not in labeled_elements_incorporations:
@@ -160,27 +165,47 @@ def do_the_thing(data,
 
         # the groupings functionality
         if num_pathways_filter != None:
-            if min_num_pathways not in num_pathways_filter: continue
+            if min_num_pathways not in num_pathways_filter:
+                trace(idx, f"{min_num_pathways} not in {num_pathways_filter} = skipped")
+                continue
+            else:
+                trace(idx, f"{min_num_pathways} in {num_pathways_filter} = passed")
 
         if gauge_num_label_filter != None:
-            if not (set(gauge_num_label_filter) & set(num_labels)): continue
+            if not (set(gauge_num_label_filter) & set(num_labels)):
+                trace(idx, f"{set(gauge_num_label_filter)} & {set(num_labels)} = skipped")
+                continue
+            else:
+                trace(idx, f"{set(gauge_num_label_filter)} & {set(num_labels)} = passed")
+        # else:
+        #     print("*", end="", flush=True)
 
-        if include_labeled_elements:
+        if include_labeled_elements is not None:
             if not (set(include_labeled_elements) & set(labeled_elements_incorporations)):
+                trace(idx, f"{include_labeled_elements} & {labeled_elements_incorporations=} = skipped")
                 continue
+            else:
+                trace(idx, f"{include_labeled_elements} & {labeled_elements_incorporations=} = passed")
+        else:
+            trace(idx, "include_labeled_elements is not defined")
 
-        if include_labeled_elements_origins:
+        if include_labeled_elements_origins is not None:
             #print(set(include_labeled_elements_origins))
             #print(set([emi for emi, _ in end_metabolite_incorporations]))
             #print("------------------")
+            emi_set = set([emi for emi, _ in end_metabolite_incorporations])
+            #print(f"{idx} ({emi_set=}) {include_labeled_elements_origins[0]}: {set(include_labeled_elements_origins) & emi_set}")
             if not (set(include_labeled_elements_origins) & set([emi for emi, _ in end_metabolite_incorporations])):
+                trace(idx, f"{include_labeled_elements_origins} & {emi_set=} = skipped")
                 continue
+            else:
+                trace(idx, f"{include_labeled_elements_origins} & {emi_set=} = passed")
 
-        if exclude_labeled_elements:
+        if exclude_labeled_elements is not None:
             if not (set(labeled_elements_incorporations) - set(exclude_labeled_elements)):
                 continue
 
-        if tracer_labeling_filter != None:
+        if tracer_labeling_filter is not None:
             if tracer_labeling_filter not in tracer_labeling_incorporations:
                 continue
 
         """
@@ -314,4 +339,4 @@ def do_the_thing(data,
 
 
 if __name__ == '__main__':
-    do_the_thing("L-LACTATE")
\ No newline at end of file
+    do_the_thing("L-LACTATE")
diff --git a/server/dries/counts.py b/server/dries/counts.py
index 6f11675b..42217b73 100644
--- a/server/dries/counts.py
+++ b/server/dries/counts.py
@@ -1,10 +1,12 @@
-from typing import List, Tuple
+import sys
+from collections.abc import Container
+from itertools import chain, combinations, product
 
 from dries.preload import Datum
 from dries.TTFD import do_the_thing
 
 
-def counts_per_label(data: List[Datum], tracer: str, gauge: str, start: List[int]):
+def counts_per_label(data: list[Datum], tracer: str, gauge: str, start: list[int]):
     counts = {}
     for label in range(1, 50):
         results = do_the_thing(data, tracer, gauge, start, [label])
@@ -13,30 +15,62 @@ def counts_per_label(data: list[Datum], tracer: str, gauge: str, start: List[int
     return counts
 
 
-def mklabeled(gauge: str, labeled_elements: list[int]) -> str:
+def metabolite_size(metabolite: str) -> int:
     size = {
+        "ALPHA-GLUCOSE": 12,
         "L-LACTATE": 6,
+        "GLN": 10,
         "GLT": 10,
         "UMP": 21,
         "RIBULOSE-5P": 14,
         "PRO": 8,
-    }[gauge]
-    return "_".join("x" if p in labeled_elements else "" for p in range(size))
+        "PYRUVATE": 6,
+    }
+    return size.get(metabolite, 0)
+
+def metabolite_carbons(metabolite: str) -> int:
+    size = {
+        "ALPHA-GLUCOSE": 6,
+        "GLN": 5,
+        "GLT": 5,
+        "L-LACTATE": 3,
+        "PRO": 5,
+        "PYRUVATE": 3,
+        "RIBULOSE-5P": 5,
+        "UMP": 9
+    }
+    return size.get(metabolite, 0)
+
+def mklabeled(metabolite: str, labeled_positions: Container[int]) -> str:
+    return "_".join(
+        "x" if p in labeled_positions else ""
+        for p in range(metabolite_size(metabolite))
+    )
 
 def counts_per_label_and_pathways(
-    data: List[Datum],
+    data: list[Datum],
     tracer: str,
     gauge: str,
-    start: List[int],
-    label_count: int,
-    labeled_elements: list[int] | None,
-):
+    start: list[int],
+    label_count: int | None,
+    gauge_labeled_positions: list[int] | None,
+) -> dict[int, int]:
     counts = {}
-    labeled = (
-        None
-        if labeled_elements is None
-        else [mklabeled(gauge, labeled_elements)]
-    )
+    labeled = None
+    lc = None
+    if gauge_labeled_positions:
+        required = set(gauge_labeled_positions)
+        label_positions = metabolite_carbons(gauge) + 1
+        labeled = set()
+
+        for patterns in chain(combinations(range(label_positions), r=label_count) for label_count in range(1, label_positions + 1)):
+            for pattern in patterns:
+                if not set(pattern).issuperset(required):
+                    continue
+                labeled.add(mklabeled(gauge, list(pattern)))
+        # assert len(labeled) != 0, f"{label_positions=}, {required=}"
+    else:
+        lc = [label_count]
 
     for pathway_count in range(1, 9):
         results = do_the_thing(
@@ -44,19 +78,20 @@ def counts_per_label_and_pathways(
             tracer,
             gauge,
             start,  # [1,2] (`tracer_labeled_atoms`)
-            [label_count],  # [2] (`gauge_num_label_filter`)
+            lc,  # [label_count],  # [2] (`gauge_num_label_filter`)
             [pathway_count],  # [1] (`num_pathways_filter`)
             include_labeled_elements=labeled,  # ["x_x____"] (`include_labeled_elements`)
         )
         # print(pathway_count, results)
         if len(results) > 0:
+            # print(pathway_count, results)
            counts[pathway_count] = len(results)
 
     return counts
 
 
 def counts_per_position(
-    data: List[Datum], tracer: str, gauge: str, start: List[int], positions: List[int]
+    data: list[Datum], tracer: str, gauge: str, start: list[int], positions: list[int]
 ):
     labeling_mask = "_".join(["x" if p in positions else "" for p in range(6)])
     counts = {}
@@ -72,7 +107,7 @@ def counts_per_position(
 
 
 def counts_per_labeled_position(
-    data: List[Datum], tracer, gauge, start: List[Tuple[int, int]]
+    data: list[Datum], tracer, gauge, start: list[tuple[int, int]]
 ):
     labels, positions = zip(*start)
     labeling_mask = "_".join(
@@ -88,3 +123,71 @@ def counts_per_labeled_position(
             counts[pathway_count] = len(results)
 
     return counts
+
+
+def counts_per_destination_labeling(
+    data: list[Datum],
+    tracer: str,
+    gauge: str,
+    label_count: int,
+) -> dict[str, int]:
+    counts = {}
+    gauge_positions = range(metabolite_size(gauge))
+    start = list(gauge_positions)
+    for gauge_label_positions in combinations(gauge_positions, label_count):
+        gauge_labels = mklabeled(gauge, gauge_label_positions)
+        results = do_the_thing(
+            data,
+            tracer,
+            gauge,
+            start,
+            [label_count],
+            num_pathways_filter=None,
+            include_labeled_elements=[gauge_labels]
+        )
+        if len(results) > 0:
+            counts[gauge_labels] = len(results)
+    return counts
+
+def _all_origin_labelings(tracer: str, gauge: str, label_count: int) -> list[str]:
+    "size comes from the gauge, labels come from the tracer"
+    options = []
+    possible_labels = list(range(metabolite_carbons(tracer) + 2))
+    possible_positions = list(range(metabolite_carbons(gauge) + 1))
+    #print(metabolite, start)
+    check = 0
+    for positions in combinations(possible_positions, label_count):
+        #for labelling in permutations(possible_labels, len(positions)):
+        for labelling in product(possible_labels, repeat=len(positions)):
+            ls = {p: l for (p, l) in zip(positions, labelling, strict=True)}
+            options.append("_".join([str(ls.get(l, "")) for l in range(metabolite_size(gauge))]))
+            #print(metabolite, options[-1])
+            check += 1
+            if check > 100_000_000:
+                raise ValueError("Too large query")
+
+    assert check < 100_000_000, f"CHECK FAILED: {check} from label_count = {label_count}"
+    # options.append("_".join(str(label) if i == pos else "" for i, (pos, label) in enumerate(zip(positions, labelling, strict=True))))
+    return options
+
+def counts_per_origin_labeling(
+    data: list[Datum],
+    tracer: str,
+    gauge: str,
+    gauge_label_positions: list[int],
+):
+    gauge_labels = [mklabeled(gauge, gauge_label_positions)]
+    start = list(range(metabolite_size(tracer)))
+    origin_labels = _all_origin_labelings(tracer, gauge, len(gauge_label_positions))
+    return do_the_thing(
+        data,
+        tracer,
+        gauge,
+        start,  # [1,2] (`tracer_labeled_atoms`)
+        gauge_num_label_filter=None,  # [2] (`gauge_num_label_filter`)
+        num_pathways_filter=None,  # [1] (`num_pathways_filter`)
+        include_labeled_elements=gauge_labels,  # ["x_x____"] (`include_labeled_elements`)
+        include_labeled_elements_origins=origin_labels
+    )
+
+
diff --git a/server/migrations/env.py b/server/migrations/env.py
index 55b8c585..0c142dcc 100644
--- a/server/migrations/env.py
+++ b/server/migrations/env.py
@@ -1,12 +1,17 @@
 from logging.config import fileConfig
 
-from sqlalchemy import create_engine
-
 from alembic import context
+from alembic_utils.replaceable_entity import register_entities
 
 from ttfd.config import settings
 from ttfd.database import engine
-from ttfd.models import Base
+from ttfd.models import (
+    Base,
+    gauge_labelling,
+    intarray_extension,
+    pg_trgm_extension,
+    tracer_labelling,
+)
 
 # this is the Alembic Config object, which provides
 # access to the values within the .ini file in use.
@@ -19,14 +24,20 @@
 
 # add your model's MetaData object here
 # for 'autogenerate' support
-# from myapp import mymodel
-# target_metadata = mymodel.Base.metadata
 target_metadata = Base.metadata
 
 # other values from the config, defined by the needs of env.py,
 # can be acquired:
 # my_important_option = config.get_main_option("my_important_option")
 # ... etc.
+register_entities(
+    [gauge_labelling, tracer_labelling, pg_trgm_extension, intarray_extension]
+)
+
+
+def include_object(object, name, type_, reflected, compare_to):
+    """Exclude views from Alembic's consideration."""
+    return not object.info.get("is_view", False)
 
 
 def run_migrations_offline() -> None:
@@ -46,7 +57,8 @@ def run_migrations_offline() -> None:
         target_metadata=target_metadata,
         literal_binds=True,
         dialect_opts={"paramstyle": "named"},
-        render_as_batch=True,
+        render_as_batch=False,
+        include_object=include_object,
     )
 
     with context.begin_transaction():
@@ -62,7 +74,10 @@ def run_migrations_online() -> None:
     """
     with engine.connect() as connection:
         context.configure(
-            connection=connection, target_metadata=target_metadata, render_as_batch=True
+            connection=connection,
+            target_metadata=target_metadata,
+            render_as_batch=False,
+            include_object=include_object,
         )
 
         with context.begin_transaction():
diff --git a/server/migrations/versions/c24eaab852f4_add_a_gauge_labels_view.py b/server/migrations/versions/c24eaab852f4_add_a_gauge_labels_view.py
new file mode 100644
index 00000000..d751f9f0
--- /dev/null
+++ b/server/migrations/versions/c24eaab852f4_add_a_gauge_labels_view.py
@@ -0,0 +1,48 @@
+"""Add a gauge labels view.
+
+Revision ID: c24eaab852f4
+Revises: 43b46c8fe17c
+Create Date: 2025-09-18 11:56:02.041315
+
+"""
+
+from alembic import op
+
+from ttfd.models import tracer_labelling as public_tracer_labelling
+from ttfd.models import gauge_labelling as public_gauge_labelling
+from ttfd.models import intarray_extension as public_intarray
+
+# revision identifiers, used by Alembic.
+revision = "c24eaab852f4"
+down_revision = "43b46c8fe17c"
+branch_labels = None
+depends_on = None
+
+def upgrade() -> None:
+    with op.batch_alter_table("sessions", schema=None) as batch_op:
+        batch_op.drop_index(batch_op.f("ix_api_keys_token"))
+        batch_op.drop_index(batch_op.f("ix_sessions_token"))
+        batch_op.create_index(batch_op.f("ix_sessions_token"), ["token"], unique=False)
+
+    with op.batch_alter_table("api_keys", schema=None) as batch_op:
+        batch_op.create_index(batch_op.f("ix_api_keys_token"), ["token"], unique=False)
+
+    op.create_entity(public_gauge_labelling)  # type: ignore [attr-defined]
+    op.create_entity(public_tracer_labelling)  # type: ignore [attr-defined]
+
+    op.create_entity(public_intarray)  # type: ignore [attr-defined]
+
+
+def downgrade() -> None:
+    op.drop_entity(public_intarray)  # type: ignore [attr-defined]
+
+    op.drop_entity(public_tracer_labelling)  # type: ignore [attr-defined]
+    op.drop_entity(public_gauge_labelling)  # type: ignore [attr-defined]
+
+    with op.batch_alter_table("api_keys", schema=None) as batch_op:
+        batch_op.drop_index(batch_op.f("ix_api_keys_token"))
+
+    with op.batch_alter_table("sessions", schema=None) as batch_op:
+        batch_op.drop_index(batch_op.f("ix_sessions_token"))
+        batch_op.create_index(batch_op.f("ix_sessions_token"), ["token"], unique=True)
+        batch_op.create_index(batch_op.f("ix_api_keys_token"), ["token"], unique=True)
diff --git a/server/migrations/versions/d468630d342c_add_foreign_key_constraint_to_reaction_.py b/server/migrations/versions/d468630d342c_add_foreign_key_constraint_to_reaction_.py
index 948ecb9a..8ee96c73 100644
--- a/server/migrations/versions/d468630d342c_add_foreign_key_constraint_to_reaction_.py
+++ b/server/migrations/versions/d468630d342c_add_foreign_key_constraint_to_reaction_.py
@@ -20,7 +20,7 @@
 
 def upgrade() -> None:
     """Add a foreign key constraint to reaction_paths."""
-    op.execute(sa.text("CREATE EXTENSION pg_trgm;"))
+    op.execute(sa.text("CREATE EXTENSION IF NOT EXISTS pg_trgm;"))
     with op.batch_alter_table("metabolites", schema=None) as batch_op:
         batch_op.alter_column(
             "mol",
@@ -77,3 +77,4 @@ def downgrade() -> None:
             type_=sa.VARCHAR(),
             existing_nullable=False,
         )
+    op.execute(sa.text("DROP EXTENSION pg_trgm;"))
diff --git a/server/poetry.lock b/server/poetry.lock
index cb32d287..d5ed36a6 100644
--- a/server/poetry.lock
+++ b/server/poetry.lock
@@ -20,6 +20,25 @@ typing-extensions = ">=4.12"
 [package.extras]
 tz = ["tzdata"]
 
+[[package]]
+name = "alembic-utils"
+version = "0.8.8"
+description = "A sqlalchemy/alembic extension for migrating procedures and views"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+    {file = "alembic_utils-0.8.8-py3-none-any.whl", hash = "sha256:2c2545dc545833c5deb63bce2c3cde01c1807bf99da5efab2497bc8d817cb86e"},
+    {file = "alembic_utils-0.8.8.tar.gz", hash = "sha256:99de5d13194f26536bc0322f0c1660020a305015700d8447ccfc20e7d1494e5b"},
+]
+
+[package.dependencies]
+alembic = ">=1.9"
+flupy = "*"
+parse = ">=1.8.4"
+sqlalchemy = ">=1.4"
+typing_extensions = ">=0.1.0"
+
 [[package]]
 name = "allpairspy"
 version = "2.5.1"
@@ -682,6 +701,21 @@ typing-extensions = ">=4.8.0"
 all = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.5)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=3.1.5)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.18)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"]
 standard = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.5)", "httpx (>=0.23.0)", "jinja2 (>=3.1.5)", "python-multipart (>=0.0.18)", "uvicorn[standard] (>=0.12.0)"]
 
+[[package]]
+name = "flupy"
+version = "1.2.3"
+description = "Fluent data processing in Python - a chainable stream processing library for expressive data manipulation using method chaining"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+    {file = "flupy-1.2.3-py3-none-any.whl", hash = "sha256:be0f5a393bad2b3534697fbab17081993cd3f5817169dd3a61e8b2e0887612e6"},
+    {file = "flupy-1.2.3.tar.gz", hash = "sha256:220b6d40dea238cd2d66784c0d4d2a5483447a48acd343385768e0c740af9609"},
+]
+
+[package.dependencies]
+typing_extensions = ">=4"
+
 [[package]]
 name = "greenlet"
 version = "3.2.3"
@@ -1365,6 +1399,18 @@ files = [
     {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"},
 ]
 
+[[package]]
+name = "parse"
+version = "1.20.2"
+description = "parse() is the opposite of format()"
+optional = false
+python-versions = "*"
+groups = ["main"]
+files = [
+    {file = "parse-1.20.2-py2.py3-none-any.whl", hash = "sha256:967095588cb802add9177d0c0b6133b5ba33b1ea9007ca800e526f42a85af558"},
+    {file = "parse-1.20.2.tar.gz", hash = "sha256:b41d604d16503c79d81af5165155c0b20f6c8d6c559efa66b4b695c3e5a0a0ce"},
+]
+
 [[package]]
 name = "pathspec"
 version = "0.12.1"
@@ -2501,4 +2547,4 @@ h11 = ">=0.9.0,<1"
 [metadata]
 lock-version = "2.1"
 python-versions = ">=3.12,<3.14"
-content-hash = "64178e96b2304ab1e55693c8a90401b8df8317370413664cb7264848a7bc9ffe"
+content-hash = "13d3ffd90c969d089df5815e4af81560e2a296e67d0c15e0007a0af17f7423fb"
diff --git a/server/pyproject.toml b/server/pyproject.toml
index 31eace87..8aa5579c 100644
--- a/server/pyproject.toml
+++ b/server/pyproject.toml
@@ -29,6 +29,7 @@ apscheduler = "^3.11.0"
 httpx = "^0.28.1"
 approvaltests = "^15.1.0"
 pytest-approvaltests = "^0.2.4"
+alembic-utils = "^0.8.8"
 
 [tool.poetry.group.dev.dependencies]
 mypy = "^1.5.1"
diff --git a/server/static/atom-hover.js b/server/static/atom-hover.js
new file mode 100644
index 00000000..7ff16418
--- /dev/null
+++ b/server/static/atom-hover.js
@@ -0,0 +1,27 @@
+document.querySelectorAll(".ttfd-table-row")?.forEach((element) => {
+  element.addEventListener("mouseover", (e) => {
+    const gaugeIds = e.currentTarget.dataset["gaugeIds"]?.split(",");
+    if (gaugeIds) {
+      const selector = gaugeIds.map((id) => `g#gauge-${id}`).join(", ");
+      document.querySelectorAll(selector).forEach((el) => el.classList.add("marked"));
+    }
+    const tracerIds = e.currentTarget.dataset["tracerIds"]?.split(",");
+
+    if (tracerIds) {
+      const selector = tracerIds.map((id) => `g#tracer-${id}`).join(", ");
+      document.querySelectorAll(selector).forEach((el) => el.classList.add("marked"));
+    }
+  });
+  element.addEventListener("mouseout", (e) => {
+    const gaugeIds = e.currentTarget.dataset["gaugeIds"]?.split(",");
+    if (gaugeIds) {
+      const selector = gaugeIds.map((id) => `g#gauge-${id}`).join(", ");
+      document.querySelectorAll(selector).forEach((el) => el.classList.remove("marked"));
+    }
+    const tracerIds = e.currentTarget.dataset["tracerIds"]?.split(",");
+    if (tracerIds) {
+      const selector = tracerIds.map((id) => `g#tracer-${id}`).join(", ");
+      document.querySelectorAll(selector).forEach((el) => el.classList.remove("marked"));
+    }
+  });
+})
diff --git a/server/static/styles.css b/server/static/styles.css
index 0adb0416..05573a91 100644
--- a/server/static/styles.css
+++ b/server/static/styles.css
@@ -80,6 +80,7 @@ tr.hx-swapping td {
   min-height: 500px;
   justify-content: flex-start !important;
   align-items: flex-end !important;
+  gap: 0 !important;
 }
 
 .ttfd-loading-indicator {
@@ -130,7 +131,6 @@ span.marked {
   flex-direction: row;
   justify-content: space-between;
   align-items: center;
-  margin-bottom: 2rem;
   padding-left: 3rem;
   padding-right: 3rem;
   position: sticky;
@@ -156,7 +156,7 @@ span.marked {
   gap: 5px;
   position: absolute;
   left: 0;
-  top: 1rem;
+  top: 0;
 }
 
 .ttfd-button {
@@ -182,10 +182,9 @@ span.marked {
   border-radius: 4px;
   border: 1px solid rgba(0, 0, 0, 0.12);
   overflow: hidden;
-  padding: 16px;
   width: 220px;
   height: 220px;
-  padding: 10px;
+  padding: 0px;
   display: flex;
   flex-direction: column;
   justify-content: space-between;
@@ -617,119 +616,14 @@ input#ttfd-filter-pathways:checked ~ div#ttfd-filter-pathways-tab {
 }
 
 /* Atom highlight hovering and animations */
-/* Atom 0 */
-.ttfd-table:has(.ttfd-hover-gauge-0:hover) ~.ttfd-summary g#gauge-0 > circle {
-  fill: #1cbbba;
-}
-
-.ttfd-table:has(.ttfd-hover-gauge-0:hover) ~.ttfd-summary g#gauge-0 > text {
-  fill: white;
-}
-
-.ttfd-table:has(.ttfd-hover-gauge-0:hover) ~.ttfd-summary g#gauge-0 > text.ttfd-atom-index {
-  fill: #1cbbba;
-}
-
-/* Atom 1 */
-.ttfd-table:has(.ttfd-hover-gauge-1:hover) ~.ttfd-summary g#gauge-1 > circle {
-  fill: #1cbbba;
-}
-
-.ttfd-table:has(.ttfd-hover-gauge-1:hover) ~.ttfd-summary g#gauge-1 > text {
-  fill: white;
-}
-
-.ttfd-table:has(.ttfd-hover-gauge-1:hover) ~.ttfd-summary g#gauge-1 > text.ttfd-atom-index {
-  fill: #1cbbba;
-}
-
-/* Atom 2 */
-.ttfd-table:has(.ttfd-hover-gauge-2:hover) ~.ttfd-summary g#gauge-2 > circle {
-  fill: #1cbbba;
-}
-
-.ttfd-table:has(.ttfd-hover-gauge-2:hover) ~.ttfd-summary g#gauge-2 > text {
-  fill: white;
-}
-
-.ttfd-table:has(.ttfd-hover-gauge-2:hover) ~.ttfd-summary g#gauge-2 > text.ttfd-atom-index {
-  fill: #1cbbba;
-}
-
-/* Atom 3 */
-.ttfd-table:has(.ttfd-hover-gauge-3:hover) ~.ttfd-summary g#gauge-3 > circle {
-  fill: #1cbbba;
-}
-
-.ttfd-table:has(.ttfd-hover-gauge-3:hover) ~.ttfd-summary g#gauge-3 > text {
-  fill: white;
-}
-
-.ttfd-table:has(.ttfd-hover-gauge-3:hover) ~.ttfd-summary g#gauge-3 > text.ttfd-atom-index {
-  fill: #1cbbba;
-}
-
-/* Atom 4 */
-.ttfd-table:has(.ttfd-hover-gauge-4:hover) ~.ttfd-summary g#gauge-4 > circle {
-  fill: #1cbbba;
-}
-
-.ttfd-table:has(.ttfd-hover-gauge-4:hover) ~.ttfd-summary g#gauge-4 > text {
-  fill: white;
-}
-
-.ttfd-table:has(.ttfd-hover-gauge-4:hover) ~.ttfd-summary g#gauge-4 > text.ttfd-atom-index {
-  fill: #1cbbba;
-}
-
-/* Atom 5 */
-.ttfd-table:has(.ttfd-hover-gauge-5:hover) ~.ttfd-summary g#gauge-5 > circle {
-  fill: #1cbbba;
-}
-
-.ttfd-table:has(.ttfd-hover-gauge-5:hover) ~.ttfd-summary g#gauge-5 > text {
-  fill: white;
-}
-
-.ttfd-table:has(.ttfd-hover-gauge-5:hover) ~.ttfd-summary g#gauge-5 > text.ttfd-atom-index {
-  fill: #1cbbba;
-}
-
-/* Atom 6 */
-.ttfd-table:has(.ttfd-hover-gauge-6:hover) ~.ttfd-summary g#gauge-6 > circle {
-  fill: #1cbbba;
-}
-
-.ttfd-table:has(.ttfd-hover-gauge-6:hover) ~.ttfd-summary g#gauge-6 > text {
-  fill: white;
-}
-
-.ttfd-table:has(.ttfd-hover-gauge-6:hover) ~.ttfd-summary g#gauge-6 > text.ttfd-atom-index {
-  fill: #1cbbba;
-}
-
-/* Atom 7 */
-.ttfd-table:has(.ttfd-hover-gauge-7:hover) ~.ttfd-summary g#gauge-7 > circle {
-  fill: #1cbbba;
-}
-
-.ttfd-table:has(.ttfd-hover-gauge-7:hover) ~.ttfd-summary g#gauge-7 > text {
-  fill: white;
-}
-
-.ttfd-table:has(.ttfd-hover-gauge-7:hover) ~.ttfd-summary g#gauge-7 > text.ttfd-atom-index {
-  fill: #1cbbba;
-}
-
-/* Atom 8 */
-.ttfd-table:has(.ttfd-hover-gauge-8:hover) ~.ttfd-summary g#gauge-8 > circle {
+.ttfd-summary g[class="marked"] > circle {
   fill: #1cbbba;
 }
 
-.ttfd-table:has(.ttfd-hover-gauge-8:hover) ~.ttfd-summary g#gauge-8 > text {
+.ttfd-summary g[class="marked"] > text {
   fill: white;
 }
 
-.ttfd-table:has(.ttfd-hover-gauge-8:hover) ~.ttfd-summary g#gauge-8 > text.ttfd-atom-index {
+.ttfd-summary g[class="marked"] > text.ttfd-atom-index {
   fill: #1cbbba;
 }
diff --git a/server/templates/_base.html b/server/templates/_base.html
index 1236f78e..fcc461dd 100644
--- a/server/templates/_base.html
+++ b/server/templates/_base.html
@@ -82,11 +82,11 @@
         {% if ttfd.maintenance_mode %}
 
         {% else %}
-        {% include "_summary.html" %}
         {% block main required %} {% endblock %}
+        {% include "_summary.html" %}
         {% endif %}