diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 5860e25..7fa0b92 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -5,7 +5,7 @@ version: 2 updates: - - package-ecosystem: "pip" # See documentation for possible values + - package-ecosystem: "npm" # See documentation for possible values directory: "/" # Location of package manifests schedule: interval: "weekly" diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 85ad93f..e005859 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -26,8 +26,6 @@ jobs: # Single deploy job since we're just deploying deploy: runs-on: ubuntu-latest - env: - POETRY_VIRTUALENVS_CREATE: false environment: name: github-pages url: ${{ steps.deployment.outputs.page_url }} @@ -36,73 +34,21 @@ jobs: uses: actions/checkout@v6 with: fetch-depth: 0 - - name: Setup Python - uses: actions/setup-python@v6 + - name: Setup Node.js + uses: actions/setup-node@v4 with: - python-version: '3.12' - - name: Temporarily rename pyproject from for_developers - run: | - mv pyproject.toml pyproject_docs.toml - mv poetry.lock poetry_docs.lock - - name: Clone backend repo - uses: actions/checkout@v6 - with: - repository: 'yaptide/yaptide' - path: 'backend_repository' - sparse-checkout: | - yaptide/ - tests/ - yaptide_tester/ - pyproject.toml - poetry.lock - pytest.ini - - name: move yaptide and tests to root dir wiht dependencies specification - run: | - mv backend_repository/yaptide . - mv backend_repository/tests . - mv backend_repository/yaptide_tester . - mv backend_repository/pyproject.toml . - mv backend_repository/poetry.lock . - mv backend_repository/pytest.ini . - - name: Update pip and install poetry - run: | - python3 -m pip install --upgrade pip - pip install poetry - - name: Install dependencies for production - run: poetry install --only main - - name: Install dependencies for tests - run: poetry install --only test - - name: Get demo simulators for Linux - run: poetry run python yaptide/admin/simulators.py download-shieldhit --dir bin/ - timeout-minutes: 2 - - name: Run tests - uses: nick-fields/retry@v3 - with: - timeout_minutes: 25 - max_attempts: 10 - retry_on: error - command: poetry run pytest --cov-report html:htmlcov --cov=yaptide - - name: Remove unused pyproject and dependencies - run: | - rm pyproject.toml - rm poetry.lock - - name: Bring back original dependencies - run: | - mv pyproject_docs.toml pyproject.toml - mv poetry_docs.lock poetry.lock - - name: Install dependencies for documentation - run: | - poetry lock - poetry install + node-version: '20' + - name: Install dependencies + run: npm install - name: Build site - run: mkdocs build + run: npm run build - name: Setup Pages uses: actions/configure-pages@v5 - name: Upload artifact if on main if: github.event_name == 'push' && github.ref == 'refs/heads/main' uses: actions/upload-pages-artifact@v4 with: - path: 'site' + path: 'dist' - name: Deploy to GitHub Pages if on main if: github.event_name == 'push' && github.ref == 'refs/heads/main' id: deployment diff --git a/.gitignore b/.gitignore index c9490a5..6240da8 100644 --- a/.gitignore +++ b/.gitignore @@ -1 +1,21 @@ -/site +# build output +dist/ +# generated types +.astro/ + +# dependencies +node_modules/ + +# logs +npm-debug.log* +yarn-debug.log* +yarn-error.log* +pnpm-debug.log* + + +# environment variables +.env +.env.production + +# macOS-specific files +.DS_Store diff --git a/.vscode/extensions.json b/.vscode/extensions.json new file mode 100644 index 0000000..22a1505 --- 
/dev/null +++ b/.vscode/extensions.json @@ -0,0 +1,4 @@ +{ + "recommendations": ["astro-build.astro-vscode"], + "unwantedRecommendations": [] +} diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 0000000..d642209 --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,11 @@ +{ + "version": "0.2.0", + "configurations": [ + { + "command": "./node_modules/.bin/astro dev", + "name": "Development server", + "request": "launch", + "type": "node-terminal" + } + ] +} diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..f288702 --- /dev/null +++ b/LICENSE @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/> + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products.
If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. 
A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. 
+ + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. 
+ + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. 
+ + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. 
+ + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. 
+ + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + <one line to give the program's name and a brief idea of what it does.> + Copyright (C) <year>  <name of author> + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see <https://www.gnu.org/licenses/>. + +Also add information on how to contact you by electronic and paper mail. + + If the program does terminal interaction, make it output a short +notice like this when it starts in an interactive mode: + + <program>  Copyright (C) <year>  <name of author> + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, your program's commands +might be different; for a GUI interface, you would use an "about box". + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU GPL, see +<https://www.gnu.org/licenses/>. + + The GNU General Public License does not permit incorporating your program +into proprietary programs. If your program is a subroutine library, you +may consider it more useful to permit linking proprietary applications with +the library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. But first, please read +<https://www.gnu.org/philosophy/why-not-lgpl.html>. diff --git a/README.md b/README.md index 6437647..d7847da 100644 --- a/README.md +++ b/README.md @@ -1,2 +1,49 @@ # Yaptide developer documentation -https://yaptide.github.io/for_developers/ + +[yaptide.github.io/for_developers](https://yaptide.github.io/for_developers/) + +Welcome to the YAPTIDE developer documentation. This site is built with the [Starlight](https://starlight.astro.build/) framework. + + +## Getting Started + +To get a local copy up and running, follow these steps. + +### Prerequisites +Make sure you have [Node.js](https://nodejs.org/) installed (version 20 or higher). + +### Installation + +1. **Clone the repo** +```bash +git clone https://github.com/yaptide/for_developers.git +``` + +2. **Install dependencies** +```bash +cd for_developers +npm install +``` + +3. **Start the development server** +```bash +npm run dev +``` + +The site will be live at `http://localhost:4321/for_developers`. Changes to your files will be reflected instantly.
+ +--- + +## Deployment + +To build for production, run: + +```bash +npm run build +``` + +To preview the build, run: + +```bash +npm run preview +``` diff --git a/astro.config.mjs b/astro.config.mjs new file mode 100644 index 0000000..bae199d --- /dev/null +++ b/astro.config.mjs @@ -0,0 +1,158 @@ +// @ts-check +import { defineConfig } from "astro/config"; +import starlight from "@astrojs/starlight"; + +// https://astro.build/config +export default defineConfig({ + site: "https://yaptide.github.io", + base: "/for_developers", + integrations: [ + starlight({ + title: "YAPTIDE Developers", + tagline: "A web-based IDE for Monte Carlo particle transport simulations", + logo: { + light: "./src/assets/yaptide-logo.svg", + dark: "./src/assets/yaptide-logo.svg", + replacesTitle: false, + }, + favicon: "/favicon.svg", + social: [ + { icon: "github", label: "GitHub", href: "https://github.com/yaptide" }, + ], + customCss: ["./src/styles/custom.css"], + editLink: { + baseUrl: "https://github.com/yaptide/for_developers/edit/main/", + }, + sidebar: [ + { + label: "Home", + slug: "", + }, + { + label: "Local Setup", + items: [ + { + label: "Frontend Demo", + slug: "local-setup/local-frontend-demo", + }, + { + label: "Full Stack - Celery Workers", + slug: "local-setup/local-celery", + }, + { + label: "Full Stack - SLURM", + slug: "local-setup/local-slurm", + } + ], + }, + { + label: "Docker Setup", + items: [ + { + label: "Frontend Demo", + slug: "docker-setup/docker-frontend-demo", + }, + { + label: "Full Stack - Celery Workers", + slug: "docker-setup/docker-celery", + }, + { + label: "Full Stack - SLURM", + slug: "docker-setup/docker-slurm", + }, + ], + }, + { + label: "Architecture", + items: [ + { label: "System Overview", slug: "architecture/overview" }, + { label: "Data Flow", slug: "architecture/data-flow" }, + { + label: "Project JSON Schema", + slug: "architecture/project-json-schema", + }, + { label: "Authentication Model", slug: "architecture/auth-model" }, + ], + }, + { + label: "Contributing", + collapsed: false, + items: [ + { label: "Contribution Guide", slug: "contributing/guide" }, + { label: "Code Style", slug: "contributing/code-style" }, + { label: "Glossary", slug: "contributing/glossary" }, + ], + }, + { + label: "Backend", + collapsed: true, + items: [ + { label: "Overview", slug: "backend/overview" }, + { label: "API Endpoints", slug: "backend/api-endpoints" }, + { label: "Database", slug: "backend/database" }, + { + label: "Simulation Lifecycle", + slug: "backend/simulation-lifecycle", + }, + { + label: "Simulator Management", + slug: "backend/simulator-management", + }, + { + label: "Docker Deployment", + slug: "backend/docker-deployment", + }, + { label: "Testing", slug: "backend/testing" }, + ], + }, + { + label: "Frontend", + collapsed: true, + items: [ + { label: "Overview", slug: "frontend/overview" }, + { label: "3D Editor", slug: "frontend/3d-editor" }, + { + label: "Simulation Services", + slug: "frontend/simulation-services", + }, + { + label: "Pyodide Converter", + slug: "frontend/pyodide-converter", + }, + { label: "Geant4 WebAssembly", slug: "frontend/geant4-wasm" }, + { label: "Auth Flows", slug: "frontend/auth-flows" }, + { label: "Adding Commands", slug: "frontend/adding-commands" }, + { label: "Testing", slug: "frontend/testing" }, + ], + }, + { + label: "Converter", + collapsed: true, + items: [ + { label: "Overview", slug: "converter/overview" }, + { label: "Conversion Flow", slug: "converter/conversion-flow" }, + { + label: "Adding a Simulator", + slug: 
"converter/adding-a-simulator", + }, + { label: "SHIELD-HIT12A", slug: "converter/shieldhit" }, + { label: "FLUKA", slug: "converter/fluka" }, + { label: "Geant4", slug: "converter/geant4" }, + { label: "Testing", slug: "converter/testing" }, + ], + }, + { + label: "API Reference", + collapsed: true, + items: [ + { label: "Overview", slug: "api-reference/overview" }, + { label: "Authentication", slug: "api-reference/auth" }, + { label: "Jobs", slug: "api-reference/jobs" }, + { label: "Results", slug: "api-reference/results" }, + { label: "User", slug: "api-reference/user" }, + ], + }, + ], + }), + ], +}); diff --git a/docs/backend/for_developers.md b/docs/backend/for_developers.md deleted file mode 100644 index 896669b..0000000 --- a/docs/backend/for_developers.md +++ /dev/null @@ -1,330 +0,0 @@ -# For developers - -Project make use of poetry for dependency management. If you do not have it installed, check official [poetry installation guide](https://python-poetry.org/docs/). -Project is configured to create virtual environment for you, so you do not need to worry about it. -Virtual environment is created in `.venv` folder in the root of the project. - -## Installing dependencies - -To install all dependencies, run: - -```bash -poetry install -``` - -This will install all the dependencies including `test` and `docs` ones. -If you want to test app, you do not need `docs` dependencies, you can skip them by using: - -```bash -poetry install --without docs -``` - -If you want to install only main dependencies, you can use: - -```bash -poetry install --only main,test -``` -(There can't be space after comma in above command) - -## Building and running the app - -Application consists of multiple components. Following instruction will guide you through the process of set up and running the application. - -Here is a flowchart that shows the various dependencies between the different components of the application. - -```mermaid -flowchart LR - id0[Redis]-->id1[Celery simulation worker]-->id2[Flask app] - id2-->id0 -``` - -1. Download SHIELD-HIT12A simulator - - Currently, we store binaries of simulators on S3 filesystem. SHIELD-HIT12A (full version) and Fluka files are encrypted. - - To simply init download process we have to run following commands: - - === "Linux" - - ```bash - poetry run yaptide/admin/simulators.py download-shieldhit --dir bin - ``` - - === "Windows (PowerShell)" - - ```powershell - poetry run yaptide\admin\simulators.py download-shieldhit --dir bin - ``` - - To get full instruction of command usage we can type - - === "Linux" - - ```bash - poetry run yaptide/admin/simulators.py - ``` - - === "Windows (PowerShell)" - - ```powershell - poetry run yaptide\admin\simulators.py - ``` - - -2. Get the redis - If you already use it just start it on port `6379` - - If not good solution would comes with help of docker, run the following commands: - - ```bash - docker run --detach --publish 6379:6379 --name yaptide_redis redis:7-alpine - ``` - - To remove this container use: - - ```bash - docker rm -f yaptide_redis - ``` - -3. 
Run Celery simulation-worker - - You can reuse the same terminal, as for redis, as docker sends redis process to the background - - === "Linux" - - ```bash - PATH=$PATH:bin BACKEND_INTERNAL_URL=http://127.0.0.1:5000 CELERY_BROKER_URL=redis://127.0.0.1:6379/0 CELERY_RESULT_BACKEND=redis://127.0.0.1:6379/0 poetry run celery --app yaptide.celery.simulation_worker worker --events -P eventlet --hostname yaptide-simulation-worker --queues simulations --loglevel=debug - ``` - - === "Windows (PowerShell)" - - ```powershell - $Env:PATH += ";" + (Join-Path -Path (Get-Location) -ChildPath "bin"); $env:BACKEND_INTERNAL_URL="http://127.0.0.1:5000"; $env:CELERY_BROKER_URL="redis://127.0.0.1:6379/0"; $env:CELERY_RESULT_BACKEND="redis://127.0.0.1:6379/0"; poetry run celery --app yaptide.celery.simulation_worker worker --events -P eventlet --hostname yaptide-simulation-worker --queues simulations --loglevel=debug - ``` - - -4. Run Celery helper-worker - - === "Linux" - - ```bash - FLASK_SQLALCHEMY_DATABASE_URI=sqlite:///db.sqlite BACKEND_INTERNAL_URL=http://127.0.0.1:5000 CELERY_BROKER_URL=redis://127.0.0.1:6379/0 CELERY_RESULT_BACKEND=redis://127.0.0.1:6379/0 poetry run celery --app yaptide.utils.helper_worker worker --events --hostname yaptide-helper-worker --queues helper --loglevel=debug - ``` - - === "Windows (PowerShell)" - - ```powershell - $env:FLASK_SQLALCHEMY_DATABASE_URI="sqlite:///db.sqlite"; $env:BACKEND_INTERNAL_URL="http://127.0.0.1:5000"; $env:CELERY_BROKER_URL="redis://127.0.0.1:6379/0"; $env:CELERY_RESULT_BACKEND="redis://127.0.0.1:6379/0"; poetry run celery --app yaptide.utils.helper_worker worker --events --hostname yaptide-helper-worker --queues helper --loglevel=debug - ``` - - -5. Run the app - - === "Linux" - - ```bash - FLASK_USE_CORS=True FLASK_SQLALCHEMY_DATABASE_URI="sqlite:///db.sqlite" CELERY_BROKER_URL=redis://127.0.0.1:6379/0 CELERY_RESULT_BACKEND=redis://127.0.0.1:6379/0 poetry run flask --app yaptide.application run - ``` - - === "Windows (PowerShell)" - - ```powershell - $env:FLASK_USE_CORS="True"; $env:FLASK_SQLALCHEMY_DATABASE_URI="sqlite:///db.sqlite"; $env:CELERY_BROKER_URL="redis://127.0.0.1:6379/0"; $env:CELERY_RESULT_BACKEND="redis://127.0.0.1:6379/0"; poetry run flask --app yaptide.application run - ``` - - - This command will create `db.sqlite` inside `./instance` folder. This is [default Flask behavior](https://flask.palletsprojects.com/en/3.0.x/config/#instance-folders). - - To get more debugging information you can also force SQLALCHEMY to use `echo` mode by setting `SQLALCHEMY_ECHO` environment variable to `True`. - - === "Linux" - - ```bash - FLASK_SQLALCHEMY_ECHO=True FLASK_USE_CORS=True FLASK_SQLALCHEMY_DATABASE_URI="sqlite:///db.sqlite" CELERY_BROKER_URL=redis://127.0.0.1:6379/0 CELERY_RESULT_BACKEND=redis://127.0.0.1:6379/0 poetry run flask --app yaptide.application run - ``` - - === "Windows (PowerShell)" - - ```powershell - $env:FLASK_SQLALCHEMY_ECHO="True"; $env:FLASK_USE_CORS="True"; $env:FLASK_SQLALCHEMY_DATABASE_URI="sqlite:///db.sqlite"; $env:CELERY_BROKER_URL="redis://127.0.0.1:6379/0"; $env:CELERY_RESULT_BACKEND="redis://127.0.0.1:6379/0"; poetry run flask --app yaptide.application run - ``` - - To include debugging messages from flask, add `--debug` option to the command. - - While running backend and frontend, developer may encounter [Cross-Origin Resource Sharing (CORS)](https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS) errors in web browser's console that prevent communication to the server. 
To resolve these CORS issues, one should set FLASK_USE_CORS=True in the `.env` file (notice that it's already included in above command). Also pay attention if your frontend runs on ```http://127.0.0.1:3000``` or ```http://localhost:3000```, because right now cors_config in application.py specifies these URLs. - - -## Database - -To add user, run: - -=== "Linux" - - ```bash - FLASK_SQLALCHEMY_DATABASE_URI="sqlite:///instance/db.sqlite" poetry run yaptide/admin/db_manage.py add-user admin --password password - ``` - -=== "Windows (PowerShell)" - - ```powershell - $env:FLASK_SQLALCHEMY_DATABASE_URI="sqlite:///instance/db.sqlite"; poetry run yaptide\admin\db_manage.py add-user admin --password password - ``` - -You can use the following command, to get more information: - -=== "Linux" - - ```bash - FLASK_SQLALCHEMY_DATABASE_URI="sqlite:///instance/db.sqlite" poetry run yaptide/admin/db_manage.py --help - ``` - -=== "Windows (PowerShell)" - - ```powershell - $env:FLASK_SQLALCHEMY_DATABASE_URI="sqlite:///instance/db.sqlite"; poetry run yaptide\admin\db_manage.py --help - ``` - -## Set up Keycloak - -To use SSO login, you need to either connect to a running instance of Keycloak that is configured to allow cross origin requests or set up Keycloak locally. -The latter is relatively easy to do with docker. The most basic configuration (not suitable for production) is as follows: - -1. Start the Keycloak docker container - - ```bash - docker run -p 8080:8080 -e KC_HTTP_RELATIVE_PATH=/auth -e KC_BOOTSTRAP_ADMIN_USERNAME=admin -e KC_BOOTSTRAP_ADMIN_PASSWORD=password quay.io/keycloak/keycloak:26.1.4 start-dev - ``` - where `KC_HTTP_RELATIVE_PATH` specifies the prefix for api calls (/auth/foo/bar in this case to match what PLGrid is using), - `KC_BOOTSTRAP_ADMIN_USERNAME` and `KC_BOOTSTRAP_ADMIN_PASSWORD` are default admin username and password, respectively. - -2. Configure the realm - - The realm is a context in which users and groups exist and are given roles. - - * Navigate to localhost:8080 and login as a default admin. Open the dropdown in the upper left corner - which says 'Keycloak / master' and click 'Create realm'. - * Specify the realm name (i.e. 'yaptide') and click 'Create' - * Now the dropdown should change to 'yaptide' indicating that this is the selected realm - -3. Configure the client - - The client is the application that wants to authenticate users via Keycloak. - - * Navigate to 'Clients' and click 'Create client'. - * Name the client (i.e. 'yaptide-app'). Only the Client ID is required. - * Click 'Next' to go to 'Capability config'. For the purpose of developing, you can leave it as-is and click 'Next' again. - * In 'Login settings' fill in all fields with correct URL to your application, otherwise Keycloak will - return 4XX HTTP errors on all requests. Change the following to your needs: - * Root URL: http://127.0.0.1:3000/ - * Home URL: http://127.0.0.1:3000/ - * Valid redirect URIs: http://127.0.0.1:3000/* - * Web origins: * - * Admin URL: http://127.0.0.1:3000/ - * Click 'Save' - - **Notice that `localhost` and `127.0.0.1` are NOT interchangeable, use the same URL when you navigate to your app in the browser** - -4. Configure the user - - * Navigate to 'Users' - * Fill in the form however you like and click 'Create' - * After creating the user, in 'User details' page, click the 'Credentials' tab - * Click the 'Set password' button, unset the 'Temporary' switch and click 'Save' - -5. 
Add required flags to user token - - Yaptide checks if user token has `plgridAccessServices` field with `PLG_YAPTIDE_ACCESS` flag set. We need to add it. - * Navigate to 'Client scopes' - * From the list, select 'profile' and go to 'Mappers' tab - * Click 'Add mapper' > 'By configuration' > 'User Attribute' - * Set 'Name' and 'User Attribute' to **EXACTLY** 'plgrid_access_services' - * Set 'Token Claim Name' to **EXACTLY** 'plgridAccessServices' - * Click 'Save' - * Navigate to 'Realm settings' and set 'Unmanaged Attributes' to 'Enabled' and click 'Save' - * Navigate to Users and select your user - * Click the 'Attributes' tab. If there is no such tab, ensure the 'Unmanaged Attributes' in 'Realm settings' is enabled - * Click 'Add attributes' - * Set Key: 'plgrid_access_services' Value: 'PLG_YAPTIDE_ACCESS' and click 'Save' - -6. Set environment variables for front-end - ``` - REACT_APP_ALT_AUTH='plg' - REACT_APP_KEYCLOAK_BASE_URL='http://127.0.0.1:8080' - REACT_APP_KEYCLOAK_REALM='yaptide' - REACT_APP_KEYCLOAK_CLIENT_ID='yaptide-app' - REACT_APP_BACKEND_URL='http://127.0.0.1:5000' - ``` - -7. Set environment variables for back-end - ``` - KEYCLOAK_BASE_URL='http://127.0.0.1:8080' - KEYCLOAK_REALM='yaptide' - KEYCLOAK_CLIENT_ID='yaptide-app' - ``` - **Again, make sure to match `127.0.0.1` or `localhost` and use your realm and client id** - -8. Restart both back-end and front-end. **Navigate to the correct URL**. 'CONNECT WITH PLGRID' should be enabled. Try logging in. - -## Testing - -To run tests use: - -=== "Linux" - - ```shell - poetry run pytest - ``` - -=== "Windows (PowerShell)" - On Windows you need to run tests one by one: - - ```shell - Get-ChildItem -Path "tests" -Filter "test_*.py" -Recurse | foreach { poetry run pytest $_.FullName } - ``` - - -## Development - -To maintain code quality, we use yapf. -To avoid running it manually we strongly recommend to use pre-commit hooks. To install it run: - -```shell -poetry run pre-commit install -``` - -### Pre-commit Use Cases - -- **Commit Changes**: Commit your changes using `git commit` in terminal or using `GUI Git client` in your IDE. - -### Case 1: All Hooks Pass Successfully - -- **Pre-commit Hooks Run**: Before the commit is finalized, pre-commit will automatically run all configured hooks. If all hooks pass without any issues, the commit proceeds as usual. - -### Case 2: Some Hooks Fail - -- **Pre-commit Hooks Run**: Before the commit is finalized, pre-commit will automatically run all configured hooks. If one or more hooks fail, pre-commit will abort the commit process. - - - **terminal** - all issues will be listed in terminal with `Failed` flag - - **VS Code** - you will get error popup, click on `show command output` alle issues will be presented in the same way as they would appear in the terminal. - -- **Fix Issues**: Address the issues reported by the failed hooks. Some hooks automatically format code so you don't have to change anything. Once the issues are fixed, commit once more. - -### YAPF - -Out main use of pre-comit is yapf which is Python code formatter that automatically formats Python code according to predefined style guidelines. We can specify styles for yapf in `[tool/yapf]` section of `pyproject.toml` file. The goal of using yapf is to always produce code that is following the chosen style guidelines. 
### Running pre-commit manually

To manually run all pre-commit hooks on the repository, use:
```shell
pre-commit run --all-files
```
If you want to run a specific hook, use:
```shell
pre-commit run <hook_id>
```
Each `hook_id` is specified in the `.pre-commit-config.yaml` file. It is recommended to use these commands after adding a new hook to your config, in order to check already existing files.

### Custom hooks

Pre-commit allows creating custom hooks by writing a script in any language supported by pre-commit and adding it to `.pre-commit-config.yaml`. In yaptide we use a custom hook which checks for non-empty env files. This hook prevents users from committing and pushing secrets, such as passwords, to the repository.

diff --git a/docs/backend/ghcr_packages.md b/docs/backend/ghcr_packages.md
deleted file mode 100644
index db2d5b3..0000000
--- a/docs/backend/ghcr_packages.md
+++ /dev/null
@@ -1,38 +0,0 @@
# Docker images on GHCR

GitHub Container Registry is an organisation-scoped place where Docker containers can be stored and then pulled freely in GitHub Actions and in solutions like gitpod.io or GitHub Codespaces. Yaptide's packages are private and can be accessed only by the organisation members.

## Deployment

Docker images for the backend (Flask) and the simulation worker can be automatically built and deployed to the ghcr.io registry. Building and deployment are handled by GitHub Actions. There are two methods:

- an automatic action triggered after every commit to master,
- an on-demand action triggered by a `/deploy-flask` or `/deploy-simulation-worker` comment typed by a user in the Pull Request discussion.

Images from master provide a way to quickly deploy a stable version of the backend part of the yaptide platform. Images from pull requests allow for fast testing of new features proposed in the PR.

## Usage

All available packages are shown in the [Packages](https://github.com/orgs/yaptide/packages) section of the yaptide organisation on GitHub. The newest master branch image is available with the tag `master`. For pull requests it is the PR number prefixed with `pr-`, e.g. `pr-17`. The corresponding docker pull command can be read after clicking on the package. In this case it would be:
```bash
docker pull ghcr.io/yaptide/yaptide-flask:pr-17
```

Deployed packages can be accessed from gitpod.io or GitHub Codespaces to easily run and test them in pull requests or on the master branch. To pull the image in gitpod.io you may be required to log in to ghcr.io via Docker using your GitHub credentials:
```bash
docker login ghcr.io --username <github-username>
```
Then you can pull the images. In GitHub Codespaces the above command is not required.

## Retention policies

GitHub Container Registry doesn't provide any retention mechanisms. It is necessary to use external solutions and define your own GitHub Actions for this purpose. Both Flask and worker images are automatically cleaned up in the registry based on the custom retention policies defined in the `cleanup-closed-pr-packages` and `packages-retention` actions:

- Outdated master packages are removed if they are older than 1 month.
- A pull request's newest packages are removed when the PR is merged or closed.
- Outdated pull request packages are removed if they are older than 2 weeks.
- The latest pull request packages are removed if they are older than 2 months.

It is also possible to run the latter two policies manually by dispatching the `packages-retention` GitHub action. Normally it is dispatched by a cron job every Monday at 04:30 AM.
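The manual dispatch can also be done from the command line. A minimal sketch using the GitHub CLI, assuming the workflow is dispatchable under the name `packages-retention` and that you are a member of the organisation:

```bash
# Trigger the retention workflow on demand instead of waiting for the Monday cron run
gh workflow run packages-retention --repo yaptide/yaptide
```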
To delete the packages from the ghcr.io registry, it is required to use a PAT token created by an organisation and repository admin with `read:packages` and `delete:packages` permissions. It should be placed in the organisation's secrets. It is not possible to use other kinds of tokens, e.g. the action-scoped `GITHUB_TOKEN` or a fine-grained token.

diff --git a/docs/backend/index.md b/docs/backend/index.md
deleted file mode 100644
index 0b3ffdc..0000000
--- a/docs/backend/index.md
+++ /dev/null
@@ -1,16 +0,0 @@
# Yaptide (backend)

Github link: [https://github.com/yaptide/yaptide](https://github.com/yaptide/yaptide)

Developer documentation of the yaptide project.

The documentation contains:

 * [For developers](for_developers.md) - How to build the backend for developers
 * [Using docker](using_docker.md) - How to build the backend for deployment using Docker
 * [Simulator Management](simulator_management.md) - Managing simulation engine binaries with S3 storage
 * [API reference](swagger.md) - Auto-generated from the swagger yaml (useful for frontend development)
 * [Jobs and tasks](states.md) - Description of the states of jobs and tasks
 * [Persistent storage](persistency.md) - Description of the database model
 * [Docker images on GHCR](ghcr_packages.md) - Images deployed to the GitHub Container Registry (ghcr.io)

diff --git a/docs/backend/mkdocs.md b/docs/backend/mkdocs.md
deleted file mode 100644
index 19d4d8c..0000000
--- a/docs/backend/mkdocs.md
+++ /dev/null
@@ -1,94 +0,0 @@
# Developer documentation

The documentation intended for developers is located in the `docs` folder.
We use [mkdocs](https://www.mkdocs.org) with the [material for mkdocs](https://squidfunk.github.io/mkdocs-material/) customisation to generate the documentation in HTML format.

## Documentation structure

### Technical documentation

Technical documentation is written in markdown format and can be found in the [docs folder](https://github.com/yaptide/yaptide/tree/master/docs).

### API reference

The [API reference](swagger.md) is generated from the [swagger](https://swagger.io) yaml file.
The [swagger.yaml](https://github.com/yaptide/yaptide/blob/master/yaptide/static/openapi.yaml) file is located in the [yaptide/static](https://github.com/yaptide/yaptide/tree/master/yaptide/static) folder. This is the location from which Flask serves it when the backend is deployed.

The HTML API documentation is rendered using the [render_swagger](https://github.com/bharel/mkdocs-render-swagger-plugin) mkdocs plugin, installed as the [mkdocs-render-swagger-plugin](https://pypi.org/project/mkdocs-render-swagger-plugin/) pip package.
It's a somewhat abandoned project, but it seems to be the only solution for generating static HTML from a swagger yaml file.
The swagger documentation can be viewed locally by deploying the backend and connecting to the backend server via the `/api/docs` endpoint.
By using the `mkdocs-render-swagger-plugin` we can serve the documentation statically on GitHub Pages.
This way users may read the documentation without deploying the backend.

The `mkdocs-render-swagger-plugin` expects the swagger yaml file to be located in the [docs folder](https://github.com/yaptide/yaptide/tree/master/docs). Therefore we modified the [docs/gen_ref_pages.py](https://github.com/yaptide/yaptide/blob/master/docs/gen_ref_pages.py) script to copy the swagger yaml file from the Flask static directory to the docs folder. The copy happens whenever the `mkdocs build` or `mkdocs serve` command is run.
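For reference, the copy performed by `docs/gen_ref_pages.py` boils down to the equivalent of this manual step (a sketch; the script runs it automatically on every build, so you should never need to do this by hand):

```bash
# Copy the swagger yaml from the Flask static directory into the docs folder,
# where the render_swagger plugin expects to find it
cp yaptide/static/openapi.yaml docs/
```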
### Code reference

The code reference is generated using the [mkdocs-gen-files](https://github.com/oprypin/mkdocs-gen-files) mkdocs plugin.
We have a [docs/gen_ref_pages.py](https://github.com/yaptide/yaptide/blob/master/docs/gen_ref_pages.py) script that crawls through all Python files in the [yaptide folder](https://github.com/yaptide/yaptide/tree/master/yaptide). It then generates markdown documentation on the fly from the docstrings of each module, class and function. A `reference/SUMMARY.md` file is also generated on the fly using the [mkdocs-literate-nav](https://github.com/oprypin/mkdocs-literate-nav) mkdocs plugin. This file serves as the left-side menu for the code reference.

### Tests coverage

The tests coverage is generated using the [mkdocs-coverage](https://github.com/pawamoy/mkdocs-coverage) mkdocs plugin. This plugin expects a pytest coverage report in the `htmlcov` directory.

## Github Pages deployment of the documentation

Github Pages deployment is done using the [GitHub Actions docs workflow](https://github.com/yaptide/yaptide/blob/master/.github/workflows/docs.yml).
It deploys a new version of the documentation whenever a new commit is pushed to the `master` branch.
The deployment includes generation of the test coverage report and the API reference documentation.

## Local deployment of the documentation

### Prerequisites

First, the user needs to install [poetry](https://python-poetry.org).
Then, the user needs to install the dependencies for the backend and the documentation:

```bash
poetry install --only main,docs
```

### Building the documentation

To build the documentation, run the following command:

```bash
poetry run mkdocs build
```

This will generate the documentation in the `site` folder.

To serve the documentation locally, run the following command:

```bash
poetry run mkdocs serve
```

This will start a local webserver on port 8000. The documentation can be viewed by opening the following url in a browser: http://localhost:8000

### Working with the technical documentation

After modifying a markdown file, the documentation served via the `mkdocs serve` command will be updated automatically.

### Working with the API reference

After modifying the swagger yaml, one needs to stop the `mkdocs serve` command and run it again. This is required because, in order to re-generate the API reference documentation, mkdocs needs to copy the swagger yaml file from the Flask static directory to the docs folder.
Please avoid modifying and committing the swagger yaml file in the docs folder, as it will be overwritten by the `mkdocs serve` command.

### Working with the code reference

After modifying the Python code, one needs to stop the `mkdocs serve` command and run it again.
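In practice this is a quick restart loop; a small convenience sketch, assuming the dependencies from the Prerequisites section are installed:

```bash
# Stop the running server with Ctrl+C, then start it again
# to pick up changes in Python docstrings
poetry run mkdocs serve
```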
- -### Working with the tests coverage - -To regeneate tests coverage one needs to run the following command: - -```bash -poetry run pytest --cov-report html:htmlcov --cov=yaptide -``` - -Note that this requires installation of dependencies for the backend and the tests: - -```bash -poetry install --only main,test -``` diff --git a/docs/backend/openapi.yaml b/docs/backend/openapi.yaml deleted file mode 100644 index 81e3ef4..0000000 --- a/docs/backend/openapi.yaml +++ /dev/null @@ -1,1418 +0,0 @@ -openapi: 3.0.3 -info: - title: Yaptide Project Api Documentation - version: 1.0.0 - description: Yaptide Project Api Documentation -servers: - - url: http://localhost:5000 -paths: - /: - get: - security: - - basicAuth: [ ] - summary: Allows to check if server is alive - description: Allows to check if server is alive. If server is down it won't respond to this request - responses: - '200': - description: Successful operation - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - -# Authorisation Routes - /auth/register: - put: - summary: Allows registration of new users - requestBody: - required: true - content: - application/json: - schema: - $ref: '#/components/schemas/RegisterLoginRequest' - responses: - '201': - description: Returns message - User created - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '400': - description: Bad Request - Missing payload or keys in payload - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '403': - description: Forbidden - User already exists - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '500': - description: Internal Server Error - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - /auth/login: - post: - summary: Allows to login the user - server sets refresh and access tokens in cookies - requestBody: - required: true - content: - application/json: - schema: - $ref: '#/components/schemas/RegisterLoginRequest' - responses: - '202': - description: Includes expiration time in milliseconds of new access tokens with message - Successfully logged in - content: - application/json: - schema: - type: object - properties: - message: - type: string - access_exp: - type: integer - refresh_exp: - type: integer - '400': - description: Bad Request - Missing payload or keys in payload - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '401': - description: Forbidden - Invalid login or password - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '500': - description: Internal Server Error - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - /auth/refresh: - get: - summary: Allows to refresh access token - server sets new access token in cookies - responses: - '200': - description: Includes expiration time in milliseconds of new access token - content: - application/json: - schema: - type: object - properties: - message: - type: string - access_exp: - type: integer - '401': - description: Unauthorized - No token provided - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '403': - description: Forbidden - User not found or log in required - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '500': - description: Internal Server Error - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' 
- /auth/status: - get: - summary: Allows to retrieve logged in user information - responses: - '200': - description: Includes logged in user information, shown in example response below - content: - application/json: - schema: - type: object - properties: - message: - type: string - username: - type: string - '401': - description: Unauthorized - Invalid credentials - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '403': - description: Forbidden - User not found, log in required or token refresh required - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '500': - description: Internal Server Error - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - /auth/logout: - delete: - summary: Allows to logout the user - server removes access and refresh tokens from cookies. If nobody was logged in then just nothing happens - responses: - '200': - description: User was successfully logged out - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - -# Keycloak auth Routes - /auth/keycloak: - post: - summary: Allows to login the user with keycloak credentials - server sets refresh and access tokens in cookies - description: At current state this endpoint works only with server running on C3 - parameters: - - in: header - name: Authorization - schema: - type: string - required: true - description: keycloak token - requestBody: - required: true - content: - application/json: - schema: - $ref: '#/components/schemas/KeycloakRequest' - responses: - '202': - description: Includes expiration time in milliseconds of new access token - content: - application/json: - schema: - type: object - properties: - message: - type: string - access_exp: - type: integer - '400': - description: Bad Request - Missing payload or keys in payload - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '401': - description: Forbidden - No token provided - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '500': - description: Internal Server Error - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - -# User Routes - /user/simulations: - get: - summary: Allows for getting user's simulations - parameters: - - in: query - name: page_size - schema: - type: integer - description: Specifies the page size from range [0,100] - incorrect or - non provided value will result in sending page with size 10 - - in: query - name: page_idx - schema: - type: integer - description: Specifies the page index to be send - incorrect or non - provided value will result in sending page with index 0 - - in: query - name: order_by - schema: - type: string - enum: [start_time, end_time] - description: Specifies the parameter by which pages are sorted, available are start_time or end_time - - incorrect or non provided value will result in sending page sorted by start_time - - in: query - name: order_type - schema: - type: string - enum: [ascend, descend] - description: Specifies the order in which pages are sorted, available are ascend or descend - - incorrect or non provided value will result in sending page sorted by ascend - responses: - '200': - description: Includes list of user's simulations - content: - application/json: - schema: - type: object - properties: - message: - type: string - page_count: - type: integer - description: returns the number of available pages - simulations_count: - type: integer - 
description: returns the number of owned simulations - simulations: - description: is a list of simulations returned in requested page - type: array - items: - $ref: '#/components/schemas/Simulation' - - '400': - description: Bad Request - Missing parameters - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '401': - description: Unauthorized - Invalid credentials - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '403': - description: Forbidden - User not found, log in required or token refresh required - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '500': - description: Internal Server Error - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - delete: - summary: Allows for deleting simulation. - parameters: - - in: query - name: job_id - schema: - type: string - responses: - '200': - description: Simulation was succesfully deleted - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '400': - description: Bad Request - Missing parameters - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '401': - description: Unauthorized - User does not have permission to delete this simulation - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '403': - description: Forbidden - The simulation is currently running, needs to be canceled or completed before deletion - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '404': - description: Not Found - The simulation with the provided job_id does not exist - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '500': - description: Internal Server Error - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - /user/clusters: - description: Returns available clusters - get: - summary: Allows for getting available clusters - responses: - '200': - description: Includes list of available clusters - content: - application/json: - schema: - type: object - properties: - message: - type: string - clusters: - type: array - items: - type: object - properties: - cluster_name: - type: string - '400': - description: Bad Request - Missing parameters - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '401': - description: Unauthorized - Invalid credentials - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '403': - description: Forbidden - User not found, log in required or token refresh required - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '500': - description: Internal Server Error - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - -# Jobs Routes - /jobs: - get: - summary: Allows for getting job status from database. - description: Endpoint designed for getting periodical job status from database. 
- parameters: - - in: query - name: job_id - schema: - type: string - responses: - '200': - description: Includes status of the job - content: - application/json: - schema: - type: object - properties: - message: - type: string - job_state: - type: string - enum: [PENDING, RUNNING, COMPLETED, FAILED] - description: job state - job_task_status: - type: array - items: - $ref: '#/components/schemas/JobTaskStatus' - '400': - description: Bad Request - Missing parameters - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '401': - description: Unauthorized - Invalid credentials - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '403': - description: Forbidden - User not found, log in required, token refresh required or job with provided ID does not belong to the user - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '404': - description: Job with provided ID does not exist - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '408': - description: Timeout - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '500': - description: Internal Server Error - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - post: - summary: For updating simulation object in database and uploading error logs - requestBody: - required: true - content: - application/json: - schema: - $ref: '#/components/schemas/UpdateJob' - responses: - '202': - description: Simulation updated - '501': - description: Error updating simulation - /jobs/direct: - post: - summary: Allows for submitting jobs to run directly on server. - requestBody: - required: true - content: - application/json: - schema: - allOf: - - $ref: '#/components/schemas/JobsRequest' - - $ref: '#/components/schemas/Input' - responses: - '202': - description: Includes job_id and additional information about submited job - content: - application/json: - schema: - type: object - properties: - job_id: - type: string - message: - type: string - '400': - description: Bad Request - Missing payload or keys in payload - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '401': - description: Unauthorized - Invalid credentials - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '403': - description: Forbidden - User not found, log in required or token refresh required - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '500': - description: Internal Server Error - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - get: - summary: Allows for getting job status and additional job's info. Available only for jobs run directly. - description: At current stage of development it does not provide any addtional features than GET method from '/jobs' route - it does the same thing but it is available only for jobs run directly. 
- parameters: - - in: query - name: job_id - schema: - type: string - responses: - '200': - description: Includes status and additional information about the job - content: - application/json: - schema: - type: object - properties: - message: - type: string - job_state: - type: string - enum: [PENDING, RUNNING, COMPLETED, FAILED] - description: job state - job_task_status: - type: array - items: - $ref: '#/components/schemas/JobTaskStatus' - '400': - description: Bad Request - Missing parameters - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '401': - description: Unauthorized - Invalid credentials - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '403': - description: Forbidden - User not found, log in required, token refresh required or job with provided ID does not belong to the user - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '404': - description: Job with provided ID does not exist - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '408': - description: Timeout - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '500': - description: Internal Server Error - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - delete: - summary: Allows for job cancelation. Available only for jobs run directly. - parameters: - - in: query - name: job_id - schema: - type: string - responses: - '200': - description: Returns information about main job and subtasks' cancelation - content: - application/json: - schema: - $ref: '#/components/schemas/JobsDirectDeleteResponse' - '400': - description: Bad Request - Missing parameters - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '401': - description: Unauthorized - Invalid credentials - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '403': - description: Forbidden - User not found, log in required, token refresh required or job with provided ID does not belong to the user - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '404': - description: Job with provided ID does not exist - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '408': - description: Timeout - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '500': - description: Internal Server Error - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - /jobs/batch: - post: - summary: Allows for submitting jobs to run on SLURM. 
- description: This endpoint can be used only by keycloak authenticated users - requestBody: - required: true - content: - application/json: - schema: - allOf: - - $ref: '#/components/schemas/JobsRequest' - - $ref: '#/components/schemas/Input' - - $ref: '#/components/schemas/BatchOptions' - responses: - '202': - description: Includes job_id and additional information about submited job - content: - application/json: - schema: - type: object - properties: - job_id: - type: string - message: - type: string - sh_files: - type: object - description: contains sh files used by the job (additional info in response from '/jobs/batch') - properties: - submit: - type: string - description: submit is a file which prepares the environment and starts array and collect - array: - type: string - description: array is array job script which runs the simulation - collect: - type: string - description: collect is a script collecting results generated by simulation - submit_stdout: - type: string - description: submit_stdout is output generated by submit.sh (additional info in response from '/jobs/batch') - '400': - description: Bad Request - Missing payload or keys in payload - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '401': - description: Unauthorized - Invalid credentials - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '403': - description: Forbidden - User not found, log in required or token refresh required - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '500': - description: Internal Server Error - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - get: - summary: Allows for getting job status and additional job's info. Available only for jobs run on SLURM. - description: This endpoint can be used only by keycloak authenticated users. It is recommended not to use it very often because of the high load it puts on the SLURM system. 
- parameters: - - in: query - name: job_id - schema: - type: string - responses: - '200': - description: Includes status and additional information about the job - content: - application/json: - schema: - type: object - properties: - message: - type: string - job_state: - type: string - enum: [PENDING, RUNNING, COMPLETED, FAILED] - description: job state - job_task_status: - type: array - items: - $ref: '#/components/schemas/JobTaskStatus' - sh_files: - type: object - description: contains sh files used by the job (additional info in response from '/jobs/batch') - properties: - submit: - type: string - description: submit is a file which prepares the environment and starts array and collect - array: - type: string - description: array is array job script which runs the simulation - collect: - type: string - description: collect is a script collecting results generated by simulation - submit_stdout: - type: string - description: submit_stdout is output generated by submit.sh (additional info in response from '/jobs/batch') - '400': - description: Bad Request - Missing parameters - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '401': - description: Unauthorized - Invalid credentials - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '403': - description: Forbidden - User not found, log in required, token refresh required or job with provided ID does not belong to the user - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '404': - description: Job with provided ID does not exist - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '408': - description: Timeout - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '500': - description: Internal Server Error - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - delete: - summary: Allows for job cancelation. Available only for jobs run on SLURM. 
- description: This endpoint can be used only by keycloak authenticated users - parameters: - - in: query - name: job_id - schema: - type: string - responses: - '200': - description: Job was succesfully canceled - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '400': - description: Bad Request - Missing parameters - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '401': - description: Unauthorized - Invalid credentials - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '403': - description: Forbidden - User not found, log in required, token refresh required or job with provided ID does not belong to the user - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '404': - description: Job with provided ID does not exist - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '408': - description: Timeout - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '500': - description: Internal Server Error - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - -# Inputs Route - /inputs: - get: - summary: Allows for retrieving input used to run simulation - parameters: - - in: query - name: job_id - schema: - type: string - responses: - '200': - description: Includes input used to run simulation - content: - application/json: - schema: - type: object - properties: - message: - type: string - input: - type: object - properties: - input_json: - type: object - description: available only if simulation was run with JSON input - input_files: - type: object - description: files used by simulation - input_type: - type: string - enum: [editor, files] - description: simulation input type - number_of_all_primaries: - type: integer - description: requested number of all primaries - '400': - description: Bad Request - Missing parameters - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '401': - description: Unauthorized - Invalid credentials - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '403': - description: Forbidden - User not found, log in required, token refresh required or job with provided ID does not belong to the user - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '404': - description: Job with provided ID does not exist - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '500': - description: Internal Server Error - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - -# Result Routes - /results: - get: - summary: Allows getting results of the simulation. - description: This endpoint retrieves the results of a simulation. If the `estimator_name` parameter is provided, - the response will include results only for the specified estimator. If no `estimator_name` is provided, - the response will include results for all estimators associated with the job. Additionally, you can - specify pages using the `page_number` or `page_numbers` parameter to limit the response to specific pages. 
- parameters: - - in: query - name: job_id - required: true - description: ID of the job to retrieve results for - schema: - type: string - - in: query - name: estimator_name - required: false - description: Name of a specific estimator to retrieve results for - schema: - type: string - - in: query - name: page_number - required: false - description: Retrieve a specific page of results for the specified estimator. - schema: - type: integer - - in: query - name: page_numbers - required: false - description: Retrieve a specific pages of results for the specified estimator. For example, `"1-3,5"` would retrieve pages 1 through 3 and page 5. - schema: - type: integer - responses: - '200': - description: Includes results of the simulation - content: - application/json: - schema: - oneOf: - - type: object - properties: - message: - type: string - estimators: - type: array - items: - $ref: '#/components/schemas/Estimator' - - type: object - properties: - message: - type: string - estimator: - $ref: '#/components/schemas/Estimator' - - type: object - properties: - message: - type: string - pages: - type: array - items: - $ref: '#/components/schemas/Page' - - type: object - properties: - message: - type: string - page: - $ref: '#/components/schemas/Page' - '400': - description: Bad Request - Missing parameters - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '401': - description: Unauthorized - Invalid credentials - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '403': - description: Forbidden - User not found, log in required, token refresh required or job with provided ID does not belong to the user - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '404': - description: Job with provided ID does not exist - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '500': - description: Internal Server Error - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - -# Estimator Routes - /estimators: - get: - summary: Allows getting metadata of estimators for a specific simulation. 
- parameters: - - in: query - name: job_id - required: true - description: ID of the job to retrieve estimators metadata for - schema: - type: string - responses: - '200': - description: Includes estimators metadata - content: - application/json: - schema: - type: object - properties: - message: - type: string - estimators_metadata: - type: array - items: - type: object - properties: - name: - type: string - pages_metadata: - type: array - items: - type: object - properties: - page_number: - type: integer - page_name: - type: string - page_dimension: - type: integer - '400': - description: Bad Request - Missing parameters - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '401': - description: Unauthorized - Invalid credentials - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '403': - description: Forbidden - User not found, log in required, token refresh required or job with provided ID does not belong to the user - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '404': - description: Job with provided ID does not exist - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '500': - description: Internal Server Error - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - -# Task Routes - /tasks: - description: Route dedicated for Backend internal communication - post: - summary: Updates task state - description: Used by tasks to update their state - requestBody: - required: true - content: - application/json: - schema: - type: object - properties: - simulation_id: - type: integer - description: id of task's parent simulation - task_id: - type: string - description: id task to update - update_key: - type: string - description: authentication key provided to tasks - update_dict: - type: object - description: dict containing update data - required: - - simulation_id - - task_id - - update_key - - update_dict - responses: - default: - description: Response JSON for all codes - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - -# Logfiles Routes - /logfiles: - post: - summary: Allows for uploading logfiles of the simulation by its tasks - description: At current stage of development incoming logfiles override existing ones - requestBody: - required: true - content: - application/json: - schema: - type: object - properties: - simulation_id: - type: integer - description: id of task's parent simulation - update_key: - type: string - description: authentication key provided to tasks - logfiles: - type: object - description: dict containing log files - required: - - simulation_id - - update_key - - logfiles - responses: - default: - description: Response JSON for all codes - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - get: - summary: Allows getting logfiles of the simulation - parameters: - - in: query - name: job_id - schema: - type: string - responses: - '200': - description: Includes logfiles of the simulation - content: - application/json: - schema: - type: object - properties: - message: - type: string - logfiles: - type: object - description: dict of log files with names as keys and content as values. 
- '400': - description: Bad Request - Missing parameters - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '401': - description: Unauthorized - Invalid credentials - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '403': - description: Forbidden - User not found, log in required, token refresh required or job with provided ID does not belong to the user - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '404': - description: Job with provided ID does not exist - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - '500': - description: Internal Server Error - content: - application/json: - schema: - $ref: '#/components/schemas/BasicResponse' - -# Components -components: - securitySchemes: - basicAuth: # <-- arbitrary name for the security scheme - type: http - scheme: basic - schemas: - BasicResponse: - type: object - properties: - message: - type: string - description: body message - KeycloakRequest: - type: object - properties: - username: - type: string - required: - - username - RegisterLoginRequest: - type: object - properties: - username: - type: string - password: - type: string - required: - - username - - password - Metadata: - type: object - description: static additional information - properties: - platform: - type: string - description: specifies platform on which simulation is running, can be DIRECT or BATCH - server: - type: string - description: specifies platform on which simulation is running, can be DIRECT or BATCH - input_type: - type: string - description: specifies input which was used to run simulation, can be YAPTIDE_PROJECT or INPUT_FILES - sim_time: - type: string - description: specifies simulator which was used to run simulation, can be SHIELDHIT or DUMMY - Simulation: - type: object - properties: - title: - type: string - description: custom title set to for this simulation - job_id: - type: string - description: id of the job - start_time: - type: string - description: starting time of the simulation - end_time: - type: string - description: ending time of the simulation - if it is still running the value is NULL - metadata: - $ref: '#/components/schemas/Metadata' - Input: - oneOf: - - type: object - properties: - input_json: - type: object - description: specifies input json - required: - - input_json - - type: object - properties: - input_files: - type: object - properties: - beam.dat: - type: string - detect.dat: - type: string - geo.dat: - type: string - mat.dat: - type: string - description: specifies input files - required: - - input_files - JobsRequest: - type: object - properties: - ntasks: - type: integer - description: specifies number of parallel tasks to be run, default is maximum available for '/jobs/direct' and 1 for '/jobs/batch' - sim_type: - type: string - enum: [shieldhit, dummy] - description: specifies simulator type - title: - type: string - description: custom title set to for this simulation, default is workspace - input_type: - type: string - enum: [editor, files] - description: specifies input type - # oneOf: - # - input_files: - # type: array - # items: - # type: object - # properties: - # beam.dat: - # type: string - # detect.dat: - # type: string - # geo.dat: - # type: string - # mat.dat: - # type: string - # description: is required if input_type is files - # - input_json: - # type: object - # description: input_json is required if input_type is editor - required: - - ntasks - - input_type 
- - sim_type - BatchOptions: - type: object - properties: - batch_options: - type: object - description: available options can be found here: https://slurm.schedmd.com/sbatch.html. NOTE: if batch_options is not provided, SLURM will run both scripts with default settings - properties: - cluster_name: - type: string - description: stands for the cluster to be used to run simulation; it has to be one of available clusters for the user - check '/user/clusters' endpoint description; if it is not provided or provided cluster name is incorrect, first available cluster will be used - array_options: - type: object - description: dictionary of command line options used while running files with sbatch. Pairs should be option name as key and option value as value. If 2 same parameters are specified in both options and coresponding header, parameters from header will be ignored because command line options are more important for the SLURM - array_header: - type: string - description: header for files run on SLURM with sbatch - collect_options: - type: object - description: dictionary of command line options used while running files with sbatch. Pairs should be option name as key and option value as value. If 2 same parameters are specified in both options and coresponding header, parameters from header will be ignored because command line options are more important for the SLURM - collect_header: - type: string - description: header for files run on SLURM with sbatch - JobTaskStatus: - type: object - description: error, logfiles, input_files, input_json and results are deprecated: error is now sent as message, logfiles - accessed via /logfiles endpoint, results - accessed via /results endpoint, input_files and input_json - accessed via /inputs endpoint. - properties: - task_state: - type: string - enum: [PENDING, RUNNING, COMPLETED, FAILED] - description: task state - simulated_primaries: - type: integer - description: primaries already calculated by the task - requested_primaries: - type: integer - description: primaries to calculate for this task - last_update_time: - type: string - description: last time when task was updated - estimated_time: - type: object - description: is returned only, when task_state is RUNNING - not always because estimation is prepared after some period of time - properties: - hours: - type: integer - minutes: - type: integer - seconds: - type: integer - JobsDirectDeleteResponse: - type: object - properties: - message: - type: string - merge: - type: object - properties: - message: - type: string - job_state: - type: string - enum: [PENDING, RUNNING, COMPLETED, FAILED] - tasks: - type: array - items: - $ref: '#/components/schemas/CanceledTaskStatus' - CanceledTaskStatus: - type: object - properties: - message: - type: string - task_state: - type: string - enum: [PENDING, RUNNING, COMPLETED, FAILED] - Page: - type: object - description: Page object - Estimator: - type: object - description: Estimator object includes estimator's name, associated metadata, and list of pages. 
- properties: - name: - type: string - description: The name of the estimator - metadata: - type: object - description: Metadata for the estimator - pages: - type: array - description: List of pages - items: - $ref: '#/components/schemas/Page' - UpdateJobs: - type: object - required: - - sim_id - properties: - sim_id: - type: integer - description: integer, primary key of simulation in database - job_dir: - type: string - array_id: - type: integer - collect_id: - type: integer - task_state: - enum: [FAILED] - log: - type: object - description: dict that will be uploaded as error log to database -security: - - basicAuth: [] # <-- use the same name here diff --git a/docs/backend/persistency.md b/docs/backend/persistency.md deleted file mode 100644 index 9fcc379..0000000 --- a/docs/backend/persistency.md +++ /dev/null @@ -1,352 +0,0 @@ -# Persistency storage - -## Data model - -We have following data model, implemented in `yaptide/persistence/models.py`: - -Simulation model and dependent classes: -```mermaid -classDiagram - class SimulationModel { - id: int - job_id: str - user_id: int - start_time: datetime - end_time: datetime - title: str - platform: str - input_type: str - sim_type: str - job_state: str - tasks - estimators - } - - class CelerySimulationModel { - id: int - merge_id: str - } - - class BatchSimulationModel { - id: int - cluster_id: int - job_dir: str - array_id: int - collect_id: int - } - - class TaskModel { - id: int - simulation_id: int - task_id: int - requested_primaries: int - simulated_primaries: int - task_state: str - estimated_time: int - start_time: datetime - end_time: datetime - platform: str - last_update_time: datetime - } - - class CeleryTaskModel { - id: int - celery_id: str - } - - class BatchTaskModel { - id: int - } - - class InputModel { - id: int - simulation_id: int - compressed_data: bytes - data - } - - class EstimatorModel { - id: int - simulation_id: int - name: str - file_name: str - compressed_data: bytes - data - } - - class PageModel { - id: int - page_name: str - estimator_id: int - page_number: int - page_dimension: int - compressed_data: bytes - data - } - - class LogfilesModel { - id: int - simulation_id: int - compressed_data: bytes - data - } - - SimulationModel <|-- CelerySimulationModel - SimulationModel <|-- BatchSimulationModel - TaskModel <|-- CeleryTaskModel - TaskModel <|-- BatchTaskModel - SimulationModel "1" *-- "0..*" TaskModel - SimulationModel "1" *-- "0..*" EstimatorModel - EstimatorModel "1" *-- "0..*" PageModel - SimulationModel "1" *-- "0..*" LogfilesModel - SimulationModel *-- InputModel -``` - -other classes we use are: - -```mermaid -classDiagram - class UserModel { - id: int - username: str - auth_provider: str - simulations - } - - class YaptideUserModel { - id: int - password_hash: str - } - - class KeycloakUserModel { - id: int - cert: str - private_key: str - } - - class ClusterModel { - id: int - cluster_name: str - simulations - } - - UserModel <|-- YaptideUserModel - UserModel <|-- KeycloakUserModel -``` - -We've been too lazy to write down the mermaid code for these diagrams, but ChatGPT nowadays does a good job on that. -Whenever you need to update the diagrams, just copy the code from the `yaptide/persistence/models.py` file and ask ChatGPT to generate the diagram for you. - -## Database - -Production version uses PostgreSQL database, while in the unit tests suite we use SQLite in-memory database. - -Sometimes it may be convenient to connect to the production DB from outside the container, e.g. 
to check the content of the database.
Then you can use the following command to get the DB URL.

```shell
docker exec -it yaptide_flask bash -c "cd /usr/local/app && python -c 'from yaptide.application import create_app; app = create_app(); app.app_context().push() or print(app.extensions[\"sqlalchemy\"].engine.url.render_as_string(hide_password=False))'"
```

The code above is implemented as a handy one-liner and may look tricky, especially the `app.app_context().push() or` part.
The reason for that hack is simple: regular methods of getting the DB URL require the application context. This is usually achieved using the `with app.app_context():` construct, which is not possible in a one-liner.

Knowing the DB URL, you can connect to the DB using any DB client, e.g. `psql` or `pgadmin`. You can also use the `db_manage.py` script from the `yaptide/admin` directory. For example, to list all users in the DB, you can use the following command from outside the container:

```shell
FLASK_SQLALCHEMY_DATABASE_URI=postgresql+psycopg://yaptide_user:yaptide_password@localhost:5432/yaptide_db ./yaptide/admin/db_manage.py list-users
```

This is equivalent to the following command executed inside the container:

```shell
docker exec -it yaptide_flask ./yaptide/admin/db_manage.py list-users
```

## Developing model

In yaptide, Flask-Migrate is responsible for modifying the database after each change to `models.py` and for keeping track of database versions (a new version comes after each modification of `models.py`).

### Development steps
For development, running `yaptide_postgres` in Docker is required (Flask-Migrate can be used on the SQLite database we use in development, but it is the PostgreSQL database on production that we want to migrate). It is recommended to do development on a local machine.

1. Make sure all poetry dependencies are installed. Run `poetry shell` in the terminal.
2. Calling `flask db` commands will require the `FLASK_SQLALCHEMY_DATABASE_URI` variable to be defined before each execution:

    - The general pattern for `FLASK_SQLALCHEMY_DATABASE_URI` is taken from docker-compose (with only `postgres` changed to localhost or 127.0.0.1):

      `FLASK_SQLALCHEMY_DATABASE_URI=postgresql+psycopg://${POSTGRES_USER:-yaptide_user}:${POSTGRES_PASSWORD:-yaptide_password}@localhost:5432/${POSTGRES_DB:-yaptide_db}`

      e.g. for local development, `FLASK_SQLALCHEMY_DATABASE_URI=postgresql+psycopg://yaptide_user:yaptide_password@localhost:5432/yaptide_db` will be put before each `flask db` call. For local development it can be exported as a variable, but this is not recommended for environments where the username and password are sensitive information.

      **From now on, each command in this documentation containing `flask db` should be called with `FLASK_SQLALCHEMY_DATABASE_URI`.**

3. Now it's time to prepare the local/development database for development of `models.py` and creation of the migration script.

    - In `docker-compose.yml`, edit the database service to use a volume with a different name; this will create a new volume, and the old one won't get deleted. Run `scripts/start_with_docker.sh`. This will create a database that is guaranteed to reflect what's in `models.py`. Then, to mark the database with the version from `migrations/versions`, run `flask --app yaptide.application db stamp head`. This will save the id of the newest database version in the `alembic_version` table. **Be cautious, as this option is only for development on a local machine.**

4. Make your modifications in `models.py`.
5. Run `flask --app yaptide.application db migrate`.
6. A migration file will be generated in `migrations/versions`; the name of the newest file is displayed in the output of the above command.
7. **IMPORTANT!** Check the file carefully. For example, there might be some `None` values which need to be changed.

    A script that adds the CASCADE option to a foreign key constraint at first looks like this:
    ```
    def upgrade():
        # ### commands auto generated by Alembic - please adjust! ###
        with op.batch_alter_table('Task', schema=None) as batch_op:
            batch_op.drop_constraint('Task_simulation_id_fkey', type_='foreignkey')
            batch_op.create_foreign_key(None, 'Simulation', ['simulation_id'], ['id'], ondelete='CASCADE')
    ```
    In this case, change `None` to `'Task_simulation_id_fkey'`.
8. Run `flask --app yaptide.application db upgrade` to apply the migration script.
9. To undo the changes and go back to the previous version, run `flask --app yaptide.application db downgrade`.
10. Commit and push the script and the modifications to `models.py`.

### Testing migration script with copy of production volume

1. If there is a testing environment other than local - pull the changes.
2. Copy the volume ("data") of the production postgres database and save it under a different name.
3. In `docker-compose.yml`, modify the database configuration to use this volume.
4. Run `scripts/start_with_docker_develop.sh` to run the backend and additionally the pgadminer tool (see the section "Using pgadminer").
5. Again prepare `FLASK_SQLALCHEMY_DATABASE_URI` like above and use it together with each `flask db` command.
6. Run `flask --app yaptide.application db upgrade`.
7. Check also `flask --app yaptide.application db downgrade`, then do the upgrade again. All should execute without errors.
8. Do manual testing. Check the functionalities; some might be unnecessary to check depending on which part of `models.py` was changed:
    - logging in and out
    - loading simulation results, input files, logs
    - submitting a new simulation
    - operations contained in `admin/db_manage.py`

### Migrating production
1. Run git pull on master.
2. Backup the production database - before applying any migrations, create a backup of the live database (the `<...>` placeholders stand for your connection details):

    `pg_dump -U <user> -h <host> -d <db_name> > backup.sql`

    An alternative is making a copy of the volume:

    `docker run --rm -v yaptide_data:/var/lib/postgresql/data -v /home/ubuntu/backup:/backup busybox tar czf /backup/yaptide_data_backup.tar.gz -C /var/lib/postgresql/data .`

3. Again prepare `FLASK_SQLALCHEMY_DATABASE_URI` like above and use it together with each `flask db` command.
4. Applying the migration in production - there are two options for applying the migration:

    Option 1: Execute from outside the Docker container

    `FLASK_SQLALCHEMY_DATABASE_URI=postgresql+psycopg://<user>:<password>@<host>:5432/<db_name> flask --app yaptide.application db upgrade`

    Option 2: Execute from inside the Flask container

    Access the container and run the upgrade:

    `docker exec -it <container_name> bash`

    `FLASK_SQLALCHEMY_DATABASE_URI=postgresql+psycopg://<user>:<password>@<host>:5432/<db_name> flask --app yaptide.application db upgrade`

5. Rollback strategy - in case of any issues, you can revert the changes by running:

    `flask --app yaptide.application db downgrade`

    Post-migration testing - perform manual tests on the production system:

    - Verify logging in and out.
    - Check simulation submissions.
    - Ensure any functionality affected by the migration is working.
6. In case of restoring the database from backup, run:

    `psql -U <user> -h <host> -c "DROP DATABASE IF EXISTS <db_name>;"`

    `psql -U <user> -h <host> -c "CREATE DATABASE <db_name>;"`

    `psql -U <user> -h <host> -d <db_name> -f backup.sql`

    If a copy of the volume was made instead of `backup.sql`, run:

    `docker run --rm -v yaptide_data:/var/lib/postgresql/data -v /home/ubuntu/backup:/backup busybox tar xzf /backup/yaptide_data_backup.tar.gz -C /var/lib/postgresql/data`

### Using pgadminer
Pgadminer is a tool that lets the user browse the database through a graphical interface. It can help with verification, testing and troubleshooting during migration. To run pgadminer alongside the other containers, run the script `scripts/start_with_docker_develop.sh`. If executed locally, it can be accessed from the browser at `localhost:9999`. When running remotely, a tunnel connection is required. Run:
```
ssh -L 9999:localhost:9999 <user>@<remote_host>
```
then open localhost:9999 in the browser. Log in with the credentials set in the compose file. Right-click on Servers -> Register -> Server -> fill in the necessary fields in the General and Connection tabs.

## Commands in db_manage.py

The `db_manage.py` script provides several commands to manage the database. Below is a list of available commands along with their arguments and options:

- **list_users**
    - Printed columns: `username`, `auth_provider`
    - Options:
        - `-v`, `--verbose`

- **add_user**
    - Arguments:
        - `name`
    - Options:
        - `--password` (default: '')
        - `-v`, `--verbose`

- **update_user**
    - Arguments:
        - `name`
    - Options:
        - `--password` (default: '')
        - `-v`, `--verbose`

- **remove_user**
    - Arguments:
        - `name`
        - `auth_provider`

- **list_tasks**
    - Printed columns: `simulation_id`, `task_id`, `task_state`, `username`
    - Options:
        - `--user`
        - `--auth-provider`

- **remove_task**
    - Arguments:
        - `simulation_id`
        - `task_id`
    - Options:
        - `-v`, `--verbose`

- **list_simulations**
    - Printed columns: `id`, `job_id`, `start_time`, `end_time`, `username`
    - Options:
        - `-v`, `--verbose`
        - `--user`
        - `--auth-provider`

- **remove_simulation**
    - Arguments:
        - `simulation_id`
    - Options:
        - `-v`, `--verbose`

- **add_cluster**
    - Arguments:
        - `cluster_name`
    - Options:
        - `-v`, `--verbose`

- **list_clusters**
    - Columns: `id`, `cluster_name`

diff --git a/docs/backend/simulator_management.md b/docs/backend/simulator_management.md
deleted file mode 100644
index 5f4aa03..0000000
--- a/docs/backend/simulator_management.md
+++ /dev/null
@@ -1,229 +0,0 @@
# Managing Simulators with S3 Storage

## Overview

Yaptide supports two simulation engines: **SHIELD-HIT12A** and **FLUKA**. This document explains the simulator management system, how binaries are stored and deployed, and how to manage different simulator versions.

## Architecture and Logic

Yaptide implements a deployment strategy for managing simulation engine binaries. In the demo mode, when S3 environment variables are not configured in the `.env` file (or are missing from the environment), the system automatically downloads a constrained demo version of SHIELD-HIT12A directly from [shieldhit.org](https://shieldhit.org). This demo build is intended for development and validation, uses a fixed random number generator seed, enforces a 10,000 primary particle limit, and remains unencrypted because it is public.

In production mode, when S3 credentials are configured in the `.env` file, the system retrieves full-featured simulator binaries from S3-compatible object storage.
diff --git a/docs/backend/simulator_management.md b/docs/backend/simulator_management.md
deleted file mode 100644
index 5f4aa03..0000000
--- a/docs/backend/simulator_management.md
+++ /dev/null
@@ -1,229 +0,0 @@
-# Managing Simulators with S3 Storage
-
-## Overview
-
-Yaptide supports two simulation engines: **SHIELD-HIT12A** and **FLUKA**. This document explains the simulator management system, how binaries are stored and deployed, and how to manage different simulator versions.
-
-## Architecture and Logic
-
-Yaptide implements a deployment strategy for managing simulation engine binaries. In the demo mode, when S3 environment variables are not configured in the `.env` file (or are missing from the environment), the system automatically downloads a constrained demo version of SHIELD-HIT12A directly from [shieldhit.org](https://shieldhit.org). This demo build is intended for development and validation, uses a fixed random number generator seed, enforces a 10,000 primary particle limit, and remains unencrypted because it is public.
-
-In production mode, when S3 credentials are configured in the `.env` file, the system retrieves full-featured simulator binaries from S3-compatible object storage. SHIELD-HIT12A and FLUKA binaries are stored in encrypted form; the decryption password and salt are supplied via the `.env` file and must match the values used during upload. This mode provides unrestricted functionality (custom random seeds, unlimited primaries) and retains a graceful fallback: if the S3 download fails or credentials are unavailable, the system automatically falls back to the demo SHIELD-HIT12A build.
-
-## Flow During Container Startup
-
-The `yaptide-simulation-worker` container is built from [yaptide/Dockerfile-simulation-worker](https://github.com/yaptide/yaptide/blob/master/Dockerfile-simulation-worker) and invokes the startup script [yaptide/run_simulation_worker.sh](https://github.com/yaptide/yaptide/blob/master/run_simulation_worker.sh), which in turn calls the simulator manager [yaptide/yaptide/admin/simulators.py](https://github.com/yaptide/yaptide/blob/master/yaptide/admin/simulators.py). All required configuration is expected in the `.env` file; if a variable is absent there, the runtime environment is consulted, and if it is still missing a fallback path is taken.
-
-SHIELD-HIT12A is always initialized: the script first attempts to fetch and decrypt the encrypted binary from S3 using `S3_ENDPOINT`, `S3_ACCESS_KEY`, `S3_SECRET_KEY`, `S3_SHIELDHIT_BUCKET`, `S3_SHIELDHIT_KEY`, and the decryption parameters `S3_ENCRYPTION_PASSWORD` and `S3_ENCRYPTION_SALT` from `.env` or the environment. If any of these are unavailable or the download fails, the process falls back to downloading the public demo build from shieldhit.org and installs it into `/simulators/shieldhit12a/bin`.
-
-FLUKA is initialized only when `S3_FLUKA_BUCKET` and `S3_FLUKA_KEY` are provided in `.env` or the environment; the payload is encrypted and decrypted with the same password and salt variables. If these variables are absent, FLUKA installation is skipped, allowing deployments that rely solely on SHIELD-HIT12A.
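-
-A minimal sketch of the lookup order described above - `.env` first, then the process environment, then the demo fallback; the checks are illustrative and do not mirror the exact logic of `simulators.py`:
-
-```bash
-# Source .env (if present) so its values take precedence, as described above
-set -a; [ -f .env ] && . ./.env; set +a
-# If the S3 variables are available from either source, use the S3 path;
-# otherwise fall back to the public demo build
-if [ -n "${S3_ENDPOINT:-}" ] && [ -n "${S3_SHIELDHIT_BUCKET:-}" ] && [ -n "${S3_SHIELDHIT_KEY:-}" ]; then
-    echo "Downloading encrypted SHIELD-HIT12A from S3"
-else
-    echo "Falling back to the demo build from shieldhit.org"
-fi
-```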
-
-## Simulator Management Command Reference
-
-**Note:** The simulation binaries are compiled for Linux. All commands should be executed on Linux or within Windows Subsystem for Linux (WSL). Running on native Windows will not work.
-
-Use the project virtual environment managed by Poetry (see [Backend: For developers](for_developers.md) for installation and activation). The `simulators.py` CLI manages encrypted simulator binaries: it downloads SHIELD-HIT12A (with demo fallback), optionally downloads FLUKA, uploads encrypted artifacts to S3, and provides encrypt/decrypt helpers. Run `--help` to list the commands:
-
-```bash
-poetry run ./yaptide/admin/simulators.py --help
-```
-
-For command-specific options, append `--help`, e.g.:
-
-```bash
-poetry run ./yaptide/admin/simulators.py download-shieldhit --help
-```
-
-### Available Commands
-
-Most S3 parameters are read from environment variables (see [Environment Variables Configuration](#environment-variables-configuration) below). Run each command with `--help` for the full options:
-
-#### Download SHIELD-HIT12A from S3 (with demo fallback)
-
-Retrieve the SHIELD-HIT12A binary from S3 storage and decrypt it locally. If S3 is unavailable, the system falls back to the public demo version:
-
-```bash
-poetry run ./yaptide/admin/simulators.py download-shieldhit --dir ./download --decrypt
-```
-
-Confirm a successful download by listing the directory:
-
-```bash
-ls -lh ./download
-```
-
-Verify that the binary version matches your expectations:
-```bash
-./download/shieldhit --version
-```
-
-Optionally validate the binary inside the simulation worker container (mounting read-only at a non-standard path to isolate it from the production simulators):
-```bash
-docker run --rm -it \
-    -v "$(pwd)/download/shieldhit:/opt/test/shieldhit:ro" \
-    --entrypoint /bin/bash yaptide_simulation_worker \
-    -c "/opt/test/shieldhit --version"
-```
-
-For additional configuration options and parameters:
-
-```bash
-poetry run ./yaptide/admin/simulators.py download-shieldhit --help
-```
-
-#### Retrieve FLUKA from S3 (encrypted)
-
-Download and decrypt the FLUKA binary from S3 storage. Requires valid S3 credentials configured in `.env`:
-
-```bash
-poetry run ./yaptide/admin/simulators.py download-fluka --dir ./download
-```
-
-For additional configuration options:
-
-```bash
-poetry run ./yaptide/admin/simulators.py download-fluka --help
-```
-
-#### Upload Simulator Binary to S3
-
-Upload a compiled simulator binary to S3-compatible storage with optional encryption:
-
-```bash
-poetry run ./yaptide/admin/simulators.py upload --bucket my-bucket --file ./shieldhit --encrypt
-```
-
-For additional configuration options:
-
-```bash
-poetry run ./yaptide/admin/simulators.py upload --help
-```
-
-#### Encrypt File
-
-Manually encrypt a binary:
-
-```bash
-poetry run ./yaptide/admin/simulators.py encrypt --infile ./shieldhit --outfile ./shieldhit.encrypted --password my-pass --salt my-salt
-```
-
-See the options:
-
-```bash
-poetry run ./yaptide/admin/simulators.py encrypt --help
-```
-
-#### Decrypt File
-
-Manually decrypt a binary:
-
-```bash
-poetry run ./yaptide/admin/simulators.py decrypt --infile ./shieldhit.encrypted --outfile ./shieldhit --password my-pass --salt my-salt
-```
-
-See the options:
-
-```bash
-poetry run ./yaptide/admin/simulators.py decrypt --help
-```
-
-## Practical Examples
-
-### Uploading a New SHIELD-HIT12A Version
-
-A step-by-step example, assuming the SHIELD-HIT12A sources live in `$HOME/workspace/shieldhit`.
-
-**1) Compile the binary (from the source dir)**
-
-Enter the source tree:
-```bash
-cd "$HOME/workspace/shieldhit"
-```
-
-Compile with gfortran:
-```bash
-make gfortran -j
-```
-
-After the build, the binary should be `./shieldhit`. Verify its version:
-```bash
-./shieldhit --version
-# Expected shape (example):
-# SHIELD-HIT12A
-# Version: v1.1.0-8-g4ea3f147
-# Build date: Tue, 20 Jan 2026 11:28:34 +0100
-# SHIELD-HIT12A is up to date.
-```
-
-Optionally rename the binary to capture host/build metadata (recommended):
-
-```bash
-mv ./shieldhit ./shieldhit-lenovo-dev-g4ea3f147-make-gfortran
-```
-
-**2) Upload to S3 with encryption (run from the project root `yaptide/`)**
-
-Switch to the project root:
-```bash
-cd "$HOME/workspace/yaptide"
-```
-
-Upload with encryption to S3:
-```bash
-poetry run ./yaptide/admin/simulators.py upload \
-    --bucket shieldhit \
-    --file "$HOME/workspace/shieldhit/shieldhit-lenovo-dev-g4ea3f147-make-gfortran" \
-    --encrypt
-```
-
-**3) Verify download and execution**
-
-Prepare a temporary download directory:
-```bash
-mkdir -p /tmp/sh-download
-```
-
-Download and decrypt from S3:
-```bash
-poetry run ./yaptide/admin/simulators.py download-shieldhit --dir /tmp/sh-download --decrypt
-```
-
-Run a version check on the downloaded binary:
-```bash
-/tmp/sh-download/shieldhit --version
-```
-
-If you want to validate inside the simulation worker container (without overwriting the in-container simulators):
-
-```bash
-docker run --rm -it \
-    -v "/tmp/sh-download/shieldhit:/opt/test/shieldhit:ro" \
-    --entrypoint /bin/bash yaptide_simulation_worker \
-    -c "/opt/test/shieldhit --version"
-```
-
-## Environment Variables Configuration
-
-For an S3-based deployment, configure these environment variables (`.env` file):
-
-```bash
-# S3 General Configuration
-S3_ENDPOINT=s3.mycompany.com
-S3_ACCESS_KEY=AKIAIOSFODNN7EXAMPLE
-S3_SECRET_KEY=wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY
-
-# SHIELD-HIT12A Configuration
-S3_SHIELDHIT_BUCKET=yaptide-simulators
-S3_SHIELDHIT_KEY=shieldhit_latest
-
-# FLUKA Configuration
-S3_FLUKA_BUCKET=yaptide-simulators-fluka
-S3_FLUKA_KEY=fluka_v2024.tar.gz.encrypted
-
-# Encryption Configuration
-S3_ENCRYPTION_PASSWORD=my-secure-password
-S3_ENCRYPTION_SALT=my-salt-value
-```
\ No newline at end of file
diff --git a/docs/backend/states.md b/docs/backend/states.md
deleted file mode 100644
index 4e6bca4..0000000
--- a/docs/backend/states.md
+++ /dev/null
@@ -1,38 +0,0 @@
-# JOBS & TASKS
-
-Each simulation consists of one job and one or multiple tasks.
-
-## States
-
-All jobs and tasks which are part of any simulation are in some state:
-
- * `UNKNOWN` - used only for jobs which are not yet submitted but were already created in the database, so they can be fetched by the UI. A simulation with a job in this state has no tasks and cannot be canceled.
- * `PENDING` - jobs and tasks in this state are successfully submitted and are waiting for execution.
- * `RUNNING` - jobs and tasks in this state are currently executing.
- * `COMPLETED` - jobs and tasks in this state are successfully completed and cannot be canceled.
- * `FAILED` - jobs and tasks in this state failed and cannot be canceled.
- * `CANCELED` - jobs and tasks in this state are canceled.
-
-
-The diagram below shows the possible state transitions.
-
-```mermaid
----
-title: Job states
----
-stateDiagram-v2
-    [*] --> UNKNOWN
-
-    UNKNOWN --> PENDING
-    PENDING --> RUNNING
-    RUNNING --> COMPLETED
-    COMPLETED --> [*]
-
-    RUNNING --> FAILED
-    FAILED --> [*]
-
-    PENDING --> CANCELED
-    RUNNING --> CANCELED
-    CANCELED --> [*]
-
-```
diff --git a/docs/backend/swagger.md b/docs/backend/swagger.md
deleted file mode 100644
index 98cc7cf..0000000
--- a/docs/backend/swagger.md
+++ /dev/null
@@ -1 +0,0 @@
-!!swagger openapi.yaml!!
\ No newline at end of file
diff --git a/docs/backend/using_docker.md b/docs/backend/using_docker.md
deleted file mode 100644
index 59a9a78..0000000
--- a/docs/backend/using_docker.md
+++ /dev/null
@@ -1,51 +0,0 @@
-# Using Docker
-
-You can build and run the app using docker compose. A docker deployment is similar to the production deployment, so it's a good way to test the app.
-To facilitate app development, you can use the following scripts to deploy the app using docker containers:
-
-=== "Linux"
-    ```bash
-    scripts/start_with_docker.sh
-    ```
-
-=== "Windows (PowerShell)"
-    ```powershell
-    scripts/start_with_docker.ps1
-    ```
-
-The script will build the app and run it in the background. The building is equivalent to running the following command:
-
-```shell
-docker compose up --build --detach
-```
-
-If you have docker engine v25 (released 2024-01-19) or newer, you can benefit from a faster start time and use the following command:
-
-```shell
-docker compose -f docker-compose.yml -f docker-compose.fast.yml up --build --detach
-```
-
-The script `scripts/start_with_docker.*` will use the fastest way to start the app.
-
-Once it's running, the app will be available at [http://localhost:5000](http://localhost:5000). If you get an error saying the container name is already in use, stop and remove the container and then try again.
-
-When you're ready to stop the containers, use the following command:
-
-```shell
-docker compose down
-```
-
-## Docker database
-
-Registering, updating and deleting users is possible with the `db_manage.py` script located in the `yaptide/admin` folder.
-
-Once docker compose is running, you can use the following command to get more information:
-```
-docker exec -w /usr/local/app/ yaptide_flask ./yaptide/admin/db_manage.py --help
-```
-
-To add a user run:
-
-```bash
-docker exec -w /usr/local/app/ yaptide_flask ./yaptide/admin/db_manage.py add-user admin --password password
-```
diff --git a/docs/converter/index.md b/docs/converter/index.md
deleted file mode 100644
index af23ad3..0000000
--- a/docs/converter/index.md
+++ /dev/null
@@ -1,5 +0,0 @@
-# Converter
-
-Git link: [https://github.com/yaptide/converter](https://github.com/yaptide/converter)
-
- * [Readme](readme.md)
\ No newline at end of file
diff --git a/docs/converter/readme.md b/docs/converter/readme.md
deleted file mode 100644
index 89f056d..0000000
--- a/docs/converter/readme.md
+++ /dev/null
@@ -1,51 +0,0 @@
-# Yet Another Particle Transport IDE - converter
-
-The converter transforms the project file (a JSON file generated by the frontend part) into a set of input files for particle transport simulators:
-
-- SHIELD-HIT12A (beam.dat, mat.dat, geo.dat and detect.dat).
-- Fluka
-
-## Installation
-
-The project makes use of poetry for dependency management. If you do not have it installed, check the official [poetry installation guide](https://python-poetry.org/docs/).
-The project is configured to create a virtual environment for you, so you do not need to worry about it.
-The virtual environment is created in the `.venv` folder in the root of the project.
-
-To install the project, clone the repository and run the following command in the project directory:
-
-```shell
-poetry install --without=test
-```
-
-This will make the `yaptide-converter` command available inside the virtual environment.
-It can be accessed outside the virtual environment by running `poetry run yaptide-converter`.
-Alternatively, you can run `poetry shell` to enter the virtual environment, or check more examples in the [Poetry documentation section: Activating the virtual environment](https://python-poetry.org/docs/basic-usage#activating-the-virtual-environment).
-
-## Usage
-
-The converter comes with a command line application.
-It is capable of transforming the JSON project file (generated in the yaptide web interface) into a set of valid input files for SHIELD-HIT12A.
-
-To run the converter, use the following command:
-
-```bash
-python converter/main.py tests/shieldhit/resources/project.json workspace
-```
-
-## Testing
-
-To run the unit tests, you need to install the test dependencies with:
-
-```shell
-poetry install
-```
-
-Then you can run the tests with:
-
-```shell
-poetry run pytest
-```
-
-## Credits
-
-This work was partially funded by EuroHPC PL Project, Smart Growth Operational Programme 4.2
diff --git a/docs/converter/tests.md b/docs/converter/tests.md
deleted file mode 100644
index b8ba63c..0000000
--- a/docs/converter/tests.md
+++ /dev/null
@@ -1,16 +0,0 @@
-# Tests layout
-
-## General tests
-
-The general tests cover aspects of the converter common to all simulators.
-Currently only the Figures part of the geometry is tested here.
-There is also `conftest.py`, which contains a fixture with the location and content of a reference JSON project file.
-As of now this file is based on the SHIELD-HIT12A simulator, but it could be used as well for Fluka and Topas tests.
-
-### Reference files
-
-The reference JSON file is located in `tests/shieldhit/resources/project.json` together with the expected output files.
-
-## Simulator specific tests
-
-The simulator specific tests cover aspects of the converter specific to a given simulator. They are located in the `shieldhit`, `fluka` and `topas` directories.
diff --git a/docs/documentation/index.md b/docs/documentation/index.md
deleted file mode 100644
index e52817f..0000000
--- a/docs/documentation/index.md
+++ /dev/null
@@ -1,113 +0,0 @@
-# Technical documentation of the project
-
-# Developer documentation
-
-The documentation intended for developers is located in the `docs` folder.
-We use [mkdocs](https://www.mkdocs.org) with the [material for mkdocs](https://squidfunk.github.io/mkdocs-material/) customisation to generate the documentation in the HTML format.
-
-## Documentation structure
-
-### Technical documentation
-
-The technical documentation is written in markdown format and can be found in the [docs folder](https://github.com/yaptide/yaptide/tree/master/docs).
-
-### API reference
-
-The [API reference](swagger.md) is generated from the [swagger](https://swagger.io) yaml file.
-The [swagger.yaml](https://github.com/yaptide/yaptide/blob/master/yaptide/static/openapi.yaml) file is located in the [yaptide/static](https://github.com/yaptide/yaptide/tree/master/yaptide/static) folder. This is the location from which Flask serves it when the backend is deployed.
-
-The HTML API documentation is rendered using the [render_swagger](https://github.com/bharel/mkdocs-render-swagger-plugin) mkdocs plugin, installed as the [mkdocs-render-swagger-plugin](https://pypi.org/project/mkdocs-render-swagger-plugin/) pip package.
-It is a somewhat abandoned project, but it seems to be the only solution for generating static HTML from a swagger yaml file.
-The swagger documentation can be viewed locally by deploying the backend and connecting to the backend server via the `/api/docs` endpoint.
-By using the `mkdocs-render-swagger-plugin` we can serve the documentation statically on github pages.
-This way users may read the documentation without deploying the backend.
-
-The `mkdocs-render-swagger-plugin` expects the swagger yaml file to be located in the [docs folder](https://github.com/yaptide/yaptide/tree/master/docs). Therefore we modified the [docs/gen_ref_pages.py](https://github.com/yaptide/yaptide/blob/master/docs/gen_ref_pages.py) script to copy the swagger yaml file from the Flask static directory to the docs folder. The copy happens whenever the `mkdocs build` or `mkdocs serve` command is run.
-
-### Code reference
-
-The code reference is generated using the [mkdocs-gen-files](https://github.com/oprypin/mkdocs-gen-files) mkdocs plugin.
-We have a [docs/gen_ref_pages.py](https://github.com/yaptide/yaptide/blob/master/docs/gen_ref_pages.py) script that crawls through all Python files in the [yaptide folder](https://github.com/yaptide/yaptide/tree/master/yaptide) directory. It then generates markdown documentation on the fly from the docstrings of each module, class and function. An on-the-fly `reference/SUMMARY.md` file is also generated, using the [mkdocs-literate-nav](https://github.com/oprypin/mkdocs-literate-nav) mkdocs plugin. This file serves as the left-side menu for the code reference.
-
-### Tests coverage
-
-The tests coverage is generated using the [mkdocs-coverage](https://github.com/pawamoy/mkdocs-coverage) mkdocs plugin. This plugin expects a pytest coverage report in the `htmlcov` directory.
-
-## Github Pages deployment of the documentation
-
-The Github pages deployment is done using the [GitHub Actions docs workflow](https://github.com/yaptide/yaptide/blob/master/.github/workflows/docs.yml).
-It deploys a new version of the documentation whenever a new commit is pushed to the `master` branch.
-The deployment includes the generation of the test coverage report and the API reference documentation.
-
-## Local deployment of the documentation
-
-### Prerequisites
-
-First, the user needs to install [poetry](https://python-poetry.org).
-
-### Quick Start Recipe
-
-**Step 1: Clone the repository**
-
-```bash
-git clone https://github.com/yaptide/for_developers.git
-```
-
-Navigate to the repository:
-
-```bash
-cd for_developers
-```
-
-**Step 2: Install dependencies**
-
-```bash
-poetry install
-```
-
-**Step 3: Serve documentation locally**
-
-```bash
-poetry run mkdocs serve
-```
-
-This will start a local webserver on port 8000. Open http://localhost:8000 in your browser to view the documentation.
-
-The documentation will automatically reload when you modify markdown files.
-
-### Building the documentation
-
-To build the static HTML documentation, run:
-
-```bash
-poetry run mkdocs build
-```
-
-This will generate the documentation in the `site` folder.
-
-### Working with the technical documentation
-
-After a modification of a markdown file, the documentation served via the `mkdocs serve` command will be updated automatically.
-
-### Working with the API reference
-
-After a modification of the swagger yaml, one needs to stop the `mkdocs serve` command and run it again. This is required because, to re-generate the API reference documentation, mkdocs needs to copy the swagger yaml file from the Flask static directory to the docs folder.
-Please avoid modifying and committing the swagger yaml file in the docs folder, as it will be overwritten by the `mkdocs serve` command.
-
-### Working with the code reference
-
-After a modification of the Python code, one needs to stop the `mkdocs serve` command and run it again.
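-
-For orientation, each generated page emitted by the `docs/gen_ref_pages.py` script (shown in full later in this diff) consists of a single mkdocstrings directive; for example, a hypothetical page for `yaptide/admin/db_manage.py` would contain just:
-
-```
-::: yaptide.admin.db_manage
-```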
-
-### Working with the tests coverage
-
-To regenerate the tests coverage, one needs to run the following command:
-
-```bash
-poetry run pytest --cov-report html:htmlcov --cov=yaptide
-```
-
-Note that this requires installation of the dependencies for the backend and the tests:
-
-```bash
-poetry install --only main,test
-```
diff --git a/docs/frontend/authentication.md b/docs/frontend/authentication.md
deleted file mode 100644
index 203ba3c..0000000
--- a/docs/frontend/authentication.md
+++ /dev/null
@@ -1,74 +0,0 @@
-# Sequence diagrams
-
-## Keycloak
-
-Overview of the login and logout process using Keycloak
-
-```mermaid
-sequenceDiagram
-    autonumber
-    actor User
-    participant AuthService
-    participant Keycloak
-    participant Backend
-
-    User ->> AuthService: Request login
-    AuthService ->> Keycloak: Redirect to keycloak login
-    User ->> Keycloak: Login with credentials
-    Keycloak ->> AuthService: Return authenticated token
-    AuthService ->> AuthService: Check token for access to yaptide
-    opt user has access
-        AuthService ->> Backend: Verify token with backend (POST /auth/keycloak)
-        Backend ->> Keycloak: Verify if token is correct
-        opt token verified
-            Keycloak ->> Backend: Signature verified
-            Backend ->> AuthService: Response with accessExp
-            AuthService ->> AuthService: Set token refresh interval based on accessExp
-            AuthService ->> User: Provide auth context
-        end
-        opt signature expired or invalid token or keycloak connection error
-            Backend ->> AuthService: Raise exception Forbidden (403)
-        end
-    end
-    opt user doesn't have access
-        AuthService ->> User: Message with access denied
-    end
-    loop Refresh backend connection every 3 minutes
-        AuthService ->> Backend: Refresh token (GET auth/refresh)
-        Backend ->> AuthService: Response with new backend access token in cookies
-    end
-    loop Refresh token every 1/3 of tokens lifetime
-        AuthService ->> Keycloak: Refresh token
-        Keycloak ->> AuthService: Updated token
-    end
-    User ->> AuthService: Logout
-    AuthService ->> Backend: Invalidate session (DELETE /auth/logout)
-    Backend ->> AuthService: Response with cookies deleted
-    AuthService ->> Keycloak: Logout
-    AuthService ->> User: Clear user data
-```
-
-## Non-Keycloak
-
-Overview of the login and logout process in demo or dev modes
-
-```mermaid
-sequenceDiagram
-    autonumber
-    participant User
-    participant AuthService
-    participant Backend
-
-    User ->> AuthService: Request Login
-    AuthService ->> Backend: Validate Credentials (POST /auth/login)
-    Backend ->> AuthService: Response with accessExp and set access and refresh tokens in cookies
-    AuthService ->> User: Provide Auth Context
-    loop Refresh backend connection every 3 minutes
-        AuthService ->> Backend: Refresh token (GET auth/refresh)
-        Backend ->> AuthService: Response with new backend access token in cookies
-    end
-    User ->> AuthService: Logout
-    AuthService ->> Backend: Invalidate session (DELETE /auth/logout)
-    Backend ->> AuthService: Response with cookies deleted
-    AuthService ->> User: Clear User Data
-```
diff --git a/docs/frontend/examples.md b/docs/frontend/examples.md
deleted file mode 100644
index 5973bbd..0000000
--- a/docs/frontend/examples.md
+++ /dev/null
@@ -1,18 +0,0 @@
-# Examples directory
-
-The directory public/examples in the ui repository contains examples of simulation projects for the editor. To add a new example, create a new JSON file in this directory. The naming convention for the file is as follows:
-
-- ex{number of the example}.json
-
-Please note that the examples must be numbered consecutively.
-
-After creating a new example file, add it to the examplesMap.json file located in
-```bash
-src/examples/examplesMap.json
-```
-The example mapping should be as follows:
-
-- "Proton pencil beam in water" : "ex1"
-
-The key specifies how the example will be shown in the front-end app and the value is the name of the corresponding example file. Add the entry to the section of the simulator for which the example is created, as in the sketch below.
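-
-A hypothetical `examplesMap.json` fragment matching the convention above; the section and entry names are illustrative assumptions, not the actual file content:
-
-```json
-{
-    "shieldhit": {
-        "Proton pencil beam in water": "ex1"
-    },
-    "fluka": {}
-}
-```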
\ No newline at end of file
diff --git a/docs/frontend/for_developers.md b/docs/frontend/for_developers.md
deleted file mode 100644
index 8cfa31e..0000000
--- a/docs/frontend/for_developers.md
+++ /dev/null
@@ -1,128 +0,0 @@
-# yaptide web interface
-
-## For users
-
-The development version is unstable, without many features and with a lot of bugs.
-It is released automatically after every commit to the main branch of this repository and is available for testing here:
-
-The stable version is not released yet; have patience.
-
-### Loading a project file with results from a URL
-
-You can load a project file with results from a URL by appending `?` and the project file URL to the editor URL.
-
-```txt
-https://<editor_url>?<project_file_url>
-```
-
-Example:
-
-To see the results, you need to navigate to the `Results` tab in the main menu.
-
-## For developers
-
-Start by downloading the submodules:
-
-```bash
-git submodule update --init --recursive
-```
-
-Before starting the local version of the web interface, you need to install the necessary dependencies by typing the command:
-
-```bash
-npm install
-```
-
-To run the app in the development mode, type:
-
-```bash
-npm run start
-```
-
-Then open [http://localhost:3000/web_dev](http://localhost:3000/web_dev) to view it in the web browser.
-
-The page will reload if you make edits.
-
-### App configuration
-
-Currently, the app can be configured by setting the following environment variables in a `.env` file in the main project directory (the same one where `package.json` is located).
-
-#### Setting communication with backend
-The UI can be deployed on a different machine (with a different IP) than the backend. During the build phase the UI can be configured to talk to a given backend instance via the `REACT_APP_BACKEND_URL` environment variable. To adjust it, put a line like `REACT_APP_BACKEND_URL=http://localhost:5000` in the `.env` file (see the sketch at the end of this section).
-
-If the backend is deployed as a set of docker containers, then Flask is listening on port **6000** for HTTP requests (HTTPS is supported only via the NGINX proxy) on a host called `yaptide_flask`.
-Additionally, the main NGINX proxy server listens on port **5000** for plain HTTP and **8443** for HTTPS. The relevant configuration can be found in this [config file](https://github.com/yaptide/yaptide/blob/master/nginx.conf) of the backend.
-
-**Make sure that both the backend URL in `.env` and the URL typed in the browser's address bar contain the same domain part: either localhost (recommended) or 127.0.0.1. When, e.g., the frontend is opened in the browser from localhost:3000 and REACT_APP_BACKEND_URL is set to http://127.0.0.1:5000, the difference in domains will cause the browser to block setting the access_token and refresh_token returned from the backend as part of the response to the login request. This is because of the cookie option samesite='Lax' set in the backend. Without those cookies each refresh request will fail.**
-
-**When opening a yaptide instance that runs from docker in Chromium-based browsers, set `REACT_APP_BACKEND_URL=https://localhost:8443` in `.env`. Otherwise the problem described above will appear. There are some differences in how each browser implements security policies, and those constantly change.**
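-
-Putting the above together, a minimal `.env` sketch for local development against a dockerized backend (pick one line depending on your browser, as discussed above):
-
-```bash
-# Plain HTTP via the NGINX proxy (keep the domain part consistent with the address bar)
-REACT_APP_BACKEND_URL=http://localhost:5000
-# Alternative for Chromium-based browsers: HTTPS via the proxy
-# REACT_APP_BACKEND_URL=https://localhost:8443
-```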
-
-#### Other configuration options are:
-- `REACT_APP_TARGET` - if set to `demo`, the app will not require authentication and will be preloaded with demo results (this version is available at )
-- `REACT_APP_ALT_AUTH` - if set to `plg`, the app will use plgrid authentication
-- `REACT_APP_DEPLOYMENT` - if set to `dev`, the configuration will be editable from the browser console. For example, you can change the backend URL by typing `window.BACKEND_URL="http://mynew.url"` in the browser console.
-
-**It might happen that Node will cache the values of the above variables and changes in the .env file won't take effect. Close the frontend server if it's running, open a new terminal and enter the node interactive environment by executing the `node` command. Then type `process.env.NAME_OF_VARIABLE`. If it's defined, run `delete process.env.NAME_OF_VARIABLE`.**
-
-### Useful commands
-
-To make configuring the app easy, the `cross-env` package for setting environment variables is used with custom npm scripts.
-
-| Command               | Description                                                                      |
-| --------------------- | -------------------------------------------------------------------------------- |
-| `npm run start`       | Runs the app in the development mode.                                            |
-| `npm run build`       | Builds the app for production to the `build` folder.                             |
-| `npm run start-demo`  | Runs the app in the development mode with demo results.                          |
-| `npm run build-demo`  | Builds the app for production to the `build` folder with demo results.           |
-| `npm run start-plg`   | Runs the app in the development mode with plgrid authentication.                 |
-| `npm run build-plg`   | Builds the app for production to the `build` folder with plgrid authentication.  |
-| `npm run format`      | Runs the formatter.                                                              |
-| `npm run test`        | Launches the test runner in the interactive watch mode.                          |
-
-For more commands, see `package.json`.
-
-### Building the app using the Dockerfile
-
-To build the docker image, type:
-
-```bash
-docker build -t ui .
-```
-
-Then you can run the docker container named `ui` and serve the UI on port 80:
-
-```bash
-docker run --rm -d -p 80:80/tcp --name ui ui
-```
-
-## Requirements
-
-- Node.js 20.x or higher
-- Python 3.9+
-- pip and venv
-
-## Private docker image generated in the GHCR
-
-The docker image is generated automatically after every commit to the main branch of this repository.
-The package is here
-
-The command below will run the docker container named `ui` and serve the UI on port 80:
-
-```bash
-docker run --rm -d -p 80:80/tcp --name ui ghcr.io/yaptide/ui-web:master
-```
-
-## Credits
-
-This project adapts source code from the following libraries:
-
-- CSG javascript library
-    - parts of its code copied into `src/ThreeEditor/js/libs/csg/`
-    - adapted by adding types in a separate file
-- ThreeJS Editor
-    - most of its code is copied from [mrdoob's GitHub repo](https://github.com/mrdoob/three.js/tree/r132/editor) into `src/ThreeEditor`, starting from v.132
-    - the copied code is heavily adapted to "yaptide needs"
-
-This work was partially funded by EuroHPC PL Project, Smart Growth Operational Programme 4.2
diff --git a/docs/frontend/index.md b/docs/frontend/index.md
deleted file mode 100644
index ae3c794..0000000
--- a/docs/frontend/index.md
+++ /dev/null
@@ -1,9 +0,0 @@
-# Ui (frontend)
-
-Github link: [https://github.com/yaptide/ui](https://github.com/yaptide/ui)
-
-The documentation contains:
-
- * [For developers](for_developers.md) - Readme from Ui
 * [Authentication](authentication.md) - Description of the authentication mechanisms
 * [Examples](examples.md) - Examples
\ No newline at end of file
diff --git a/docs/frontend/three_editor_development.md b/docs/frontend/three_editor_development.md
deleted file mode 100644
index ef130ab..0000000
--- a/docs/frontend/three_editor_development.md
+++ /dev/null
@@ -1,213 +0,0 @@
-# How to implement additional commands for undo/redo functionality?
-
-### Basics
-
-After evaluating different design patterns for undo/redo, we decided to use the [command-pattern](http://en.wikipedia.org/wiki/Command_pattern) for implementing undo/redo functionality in the three.js-editor.
-
-This means that every action is encapsulated in a command-object which contains all the relevant information to restore the previous state.
-
-In our implementation we store the old and the new state separately (we don't store the complete state, but rather the attribute and value which has changed).
-It would also be possible to only store the difference between the old and the new state.
-
-**Before implementing your own command, you should check whether you can reuse one of the already existing ones.**
-
-For numbers, strings or booleans the Set...ValueCommand-commands can be used.
-Then there are separate commands for:
-
-- setting a color property (THREE.Color)
-- setting maps (THREE.Texture)
-- setting geometries
-- setting materials
-- setting position, rotation and scale
-
-### Template for new commands
-
-Every command needs a constructor. In the constructor
-
-```javascript
-function DoSomethingCommand(editor) {
-	Command.call(this, editor); // Required: Call default constructor
-
-	this.type = 'DoSomethingCommand'; // Required: has to match the object-name!
-	this.name = 'Set/Do/Update Something'; // Required: description of the command, used in Sidebar.History
-
-	// TODO: store all the relevant information needed to
-	// restore the old and the new state
-}
-```
-
-And as part of the prototype you need to implement four functions:
-
-- **execute:** which is also used for redo
-- **undo:** which reverts the changes made by 'execute'
-- **toJSON:** which serializes the command so that the undo/redo-history can be preserved across a browser refresh
-- **fromJSON:** which deserializes the command
-
-```javascript
-DoSomethingCommand.prototype = {
-	execute: function () {
-		// TODO: apply changes to 'object' to reach the new state
-	},
-
-	undo: function () {
-		// TODO: restore 'object' to old state
-	},
-
-	toJSON: function () {
-		var output = Command.prototype.toJSON.call(this); // Required: Call 'toJSON'-method of prototype 'Command'
-
-		// TODO: serialize all the necessary information as part of 'output' (JSON-format)
-		// so that it can be restored in 'fromJSON'
-
-		return output;
-	},
-
-	fromJSON: function (json) {
-		Command.prototype.fromJSON.call(this, json); // Required: Call 'fromJSON'-method of prototype 'Command'
-
-		// TODO: restore command from json
-	}
-};
-```
-
-### Executing a command
-
-To execute a command we need an instance of the main editor-object. The editor-object functions as the only entry point through which all commands have to go to be added as part of the undo/redo-history.
-On **editor** we then call **.execute(...)** with the new command-object, which in turn calls **history.execute(...)** and adds the command to the undo-stack.
-
-```javascript
-editor.execute(new DoSomethingCommand());
-```
-
-### Updatable commands
-
-Some commands are also **updatable**. By default a command is not updatable. Making a command updatable means that you have to implement a fifth function 'update' as part of the prototype. In it only the 'new' state gets updated, while the old one stays the same.
-
-Here, as an example, is the update-function of **SetColorCommand**:
-
-```javascript
-update: function ( cmd ) {
-
-	this.newValue = cmd.newValue;
-
-},
-```
-
-#### List of updatable commands
-
-- SetColorCommand
-- SetGeometryCommand
-- SetMaterialColorCommand
-- SetMaterialValueCommand
-- SetPositionCommand
-- SetRotationCommand
-- SetScaleCommand
-- SetValueCommand
-- SetScriptValueCommand
-
-The idea behind 'updatable commands' is that two commands of the same type which occur within a short period of time should be merged into one.
-**For example:** Dragging with your mouse over the x-position field in the sidebar leads to hundreds of minor changes to the x-position.
-The user expects not to undo every single one of those changes, but rather to go back to the position from before the mouse drag started.
-
-When editing a script, the changes are also merged into one undo-step.
-
-
-# Writing unit tests for undo-redo commands
-
-### Overview
-
-Writing unit tests for undo/redo commands is easy.
-The main idea is to simulate a scene, execute actions and perform undo and redo.
-The following steps are required.
-
-1. Create a new unit test file
-2. Include the new command and the unit test file in the editor's test suite
-3. Write the test
-4. Execute the test
-
-Each of the listed steps will now be described in detail.
-
-### 1. Create a new unit test file
-
-Create a new file at the path `test/unit/editor/TestDoSomethingCommand.js`.
-
-### 2. Include the new command in the editor test suite
-
-Navigate to the editor test suite `test/unit/unittests_editor.html` and open it.
-Within the file, go to the block of command includes and add the new command:
-
-```html
-//
-
-//...
-
-
-// add this line
-
-//...
-```
-
-It is recommended to keep the script inclusions in alphabetical order, if possible.
-
-Next, in the same file, go to the block of test-file includes and add the test file for the new command:
-
-```html
-//
-
-//...
-
-
-// add this line
-
-//...
-```
-
-Again, keeping the alphabetical order is recommended.
-
-### 3. Write the test
-
-#### Template
-
-Open the unit test file `test/unit/editor/TestDoSomethingCommand.js` and paste the following code:
-
-```javascript
-module('DoSomethingCommand');
-
-test('Test DoSomethingCommand (Undo and Redo)', function () {
-	var editor = new Editor();
-
-	var box = aBox('Name your box');
-
-	// other available objects from "CommonUtilities.js"
-	// var sphere = aSphere( 'Name your sphere' );
-	// var pointLight = aPointLight( 'Name your pointLight' );
-	// var perspectiveCamera = aPerspectiveCamera( 'Name your perspectiveCamera' );
-
-	// in most cases you'll need to add the object to work with
-	editor.execute(new AddObjectCommand(editor, box));
-
-	// your test begins here...
-});
-```
-
-The predefined code is just meant to ease development; you do not have to stick with it.
-However, the test should cover at least one `editor.execute()`, one `editor.undo()` and one `editor.redo()` call.
-
-Best practice is to call `editor.execute( new DoSomethingCommand( {custom parameters} ) )` **twice**. Since you'll have to do one undo (go one step back), it is recommended to have a custom state for comparison. Try to avoid `ok()` assertions against default values.
-
-#### Assertions
-
-After performing `editor.execute()` twice, you can do your first assertion to check whether the executes were done correctly (see the sketch after step 4).
-
-Next, you perform `editor.undo()` and check if the last action was undone.
-
-Finally, perform `editor.redo()` and verify that the values are as expected.
-
-### 4. Execute the test
-
-Open the editor's unit test suite `test/unit/unittests_editor.html` in your browser and check the results from the test framework.
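-
-A minimal sketch of that execute/undo/redo pattern; the command, its parameters and the asserted property (`userData.something`) are hypothetical:
-
-```javascript
-var editor = new Editor();
-var box = aBox('My box');
-editor.execute(new AddObjectCommand(editor, box));
-
-// execute twice, each time with a custom (hypothetical) state
-editor.execute(new DoSomethingCommand(editor, box, 'first value'));
-editor.execute(new DoSomethingCommand(editor, box, 'second value'));
-ok(box.userData.something === 'second value', 'OK, execute applied the new value');
-
-editor.undo();
-ok(box.userData.something === 'first value', 'OK, undo restored the previous value');
-
-editor.redo();
-ok(box.userData.something === 'second value', 'OK, redo re-applied the value');
-```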
diff --git a/docs/gen_ref_pages.py b/docs/gen_ref_pages.py deleted file mode 100644 index 1517b78..0000000 --- a/docs/gen_ref_pages.py +++ /dev/null @@ -1,28 +0,0 @@ -"""Generate the code reference pages and navigation.""" - -from pathlib import Path - -import mkdocs_gen_files - -nav = mkdocs_gen_files.Nav() - -for path in sorted(Path("yaptide").rglob("*.py")): - module_path = path.relative_to("yaptide").with_suffix("") - doc_path = path.relative_to("yaptide").with_suffix(".md") - full_doc_path = Path("reference", doc_path) - - parts = tuple(module_path.parts) - - if parts[-1] == "__init__" or parts[-1] == "cli": - continue - - nav[parts] = doc_path.as_posix() - - with mkdocs_gen_files.open(full_doc_path, "w") as fd: - ident = ".".join(parts) - fd.write(f"::: {ident}") - - mkdocs_gen_files.set_edit_path(full_doc_path, path) - -with mkdocs_gen_files.open("reference/SUMMARY.md", "w") as nav_file: - nav_file.writelines(nav.build_literate_nav()) diff --git a/docs/index.md b/docs/index.md deleted file mode 100644 index 33cb9fc..0000000 --- a/docs/index.md +++ /dev/null @@ -1 +0,0 @@ -# Main page \ No newline at end of file diff --git a/mkdocs.yml b/mkdocs.yml deleted file mode 100644 index f0e7341..0000000 --- a/mkdocs.yml +++ /dev/null @@ -1,74 +0,0 @@ -site_name: Yaptide for developers -site_url: https://yaptide.github.io/for_developers -repo_url: https://github.com/yaptide/for_developers -repo_name: yaptide/for_developers -copyright: Copyright © 2024 the Yaptide team - -theme: - name: material - features: - - content.code.annotate - - content.code.copy - - content.tabs.link - - navigation.tabs - - navigation.tabs.sticky - - navigation.top - -nav: -- Backend: - - Overview: backend/index.md - - For developers: backend/for_developers.md - - Using docker: backend/using_docker.md - - Simulator Management: backend/simulator_management.md - - API: backend/swagger.md - - Jobs and tasks: backend/states.md - - Persistent storage: backend/persistency.md - - Docker images on GHCR: backend/ghcr_packages.md - - Test coverage: coverage.md - - Code Reference: reference/ -- Frontend: - - Overview: frontend/index.md - - For developers: frontend/for_developers.md - - Examples: frontend/examples.md - - Authentication: frontend/authentication.md - - Examples: frontend/examples.md - - ThreeJs development: frontend/three_editor_development.md -- Converter: - - Overview: converter/index.md - - Readme: converter/readme.md - - Tests: converter/tests.md -- Editing documentation: documentation/index.md - -plugins: -- search -- gen-files: - scripts: - - docs/gen_ref_pages.py -- literate-nav: - nav_file: SUMMARY.md -- section-index -- coverage: - page_name: coverage - html_report_dir: htmlcov -- render_swagger -- mkdocstrings: - enable_inventory: true - handlers: - python: - paths: [yaptide] - options: - show_if_no_docstring: true - line_numbers: true - inheritance_diagram: true - separate_signature: true - show_root_heading: true - show_root_members_full_path: false - -markdown_extensions: - - pymdownx.superfences: - custom_fences: - - name: mermaid - class: mermaid - format: !!python/name:pymdownx.superfences.fence_code_format - - pymdownx.tabbed: - alternate_style: true diff --git a/package.json b/package.json new file mode 100644 index 0000000..9672a99 --- /dev/null +++ b/package.json @@ -0,0 +1,21 @@ +{ + "name": "yaptide-developer-docs", + "type": "module", + "version": "2.0.0", + "scripts": { + "dev": "astro dev", + "start": "astro dev", + "build": "astro build", + "preview": "astro preview", + "astro": "astro" + 
}, + "dependencies": { + "@astrojs/sitemap": "3.2.1", + "@astrojs/starlight": "^0.37.6", + "astro": "^5.6.1", + "sharp": "^0.34.2" + }, + "overrides": { + "@astrojs/sitemap": "3.2.1" + } +} \ No newline at end of file diff --git a/poetry.lock b/poetry.lock deleted file mode 100644 index e5937a6..0000000 --- a/poetry.lock +++ /dev/null @@ -1,1054 +0,0 @@ -# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand. - -[[package]] -name = "astunparse" -version = "1.6.3" -description = "An AST unparser for Python" -optional = false -python-versions = "*" -groups = ["main"] -markers = "python_version < \"3.9\"" -files = [ - {file = "astunparse-1.6.3-py2.py3-none-any.whl", hash = "sha256:c2652417f2c8b5bb325c885ae329bdf3f86424075c4fd1a128674bc6fba4b8e8"}, - {file = "astunparse-1.6.3.tar.gz", hash = "sha256:5ad93a8456f0d084c3456d059fd9a92cce667963232cbf763eac3bc5b7940872"}, -] - -[package.dependencies] -six = ">=1.6.1,<2.0" -wheel = ">=0.23.0,<1.0" - -[[package]] -name = "babel" -version = "2.16.0" -description = "Internationalization utilities" -optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b"}, - {file = "babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316"}, -] - -[package.dependencies] -pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} - -[package.extras] -dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] - -[[package]] -name = "certifi" -version = "2024.8.30" -description = "Python package for providing Mozilla's CA Bundle." -optional = false -python-versions = ">=3.6" -groups = ["main"] -files = [ - {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, - {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, -] - -[[package]] -name = "charset-normalizer" -version = "3.4.0" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
-optional = false -python-versions = ">=3.7.0" -groups = ["main"] -files = [ - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = 
"sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"}, - {file = 
"charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"}, - {file = 
"charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"}, - {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"}, - {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, -] - -[[package]] -name = "click" -version = "8.1.7" -description = "Composable command line interface toolkit" -optional = false -python-versions = ">=3.7" -groups = ["main"] -files = [ - {file = "click-8.1.7-py3-none-any.whl", hash = 
"sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, - {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -groups = ["main"] -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] - -[[package]] -name = "ghp-import" -version = "2.1.0" -description = "Copy your docs directly to the gh-pages branch." -optional = false -python-versions = "*" -groups = ["main"] -files = [ - {file = "ghp-import-2.1.0.tar.gz", hash = "sha256:9c535c4c61193c2df8871222567d7fd7e5014d835f97dc7b7439069e2413d343"}, - {file = "ghp_import-2.1.0-py3-none-any.whl", hash = "sha256:8337dd7b50877f163d4c0289bc1f1c7f127550241988d568c1db512c4324a619"}, -] - -[package.dependencies] -python-dateutil = ">=2.8.1" - -[package.extras] -dev = ["flake8", "markdown", "twine", "wheel"] - -[[package]] -name = "griffe" -version = "1.4.0" -description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." -optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "griffe-1.4.0-py3-none-any.whl", hash = "sha256:e589de8b8c137e99a46ec45f9598fc0ac5b6868ce824b24db09c02d117b89bc5"}, - {file = "griffe-1.4.0.tar.gz", hash = "sha256:8fccc585896d13f1221035d32c50dec65830c87d23f9adb9b1e6f3d63574f7f5"}, -] - -[package.dependencies] -astunparse = {version = ">=1.6", markers = "python_version < \"3.9\""} -colorama = ">=0.4" - -[[package]] -name = "idna" -version = "3.10" -description = "Internationalized Domain Names in Applications (IDNA)" -optional = false -python-versions = ">=3.6" -groups = ["main"] -files = [ - {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, - {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, -] - -[package.extras] -all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] - -[[package]] -name = "importlib-metadata" -version = "8.5.0" -description = "Read metadata from Python packages" -optional = false -python-versions = ">=3.8" -groups = ["main"] -markers = "python_version < \"3.10\"" -files = [ - {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"}, - {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"}, -] - -[package.dependencies] -zipp = ">=3.20" - -[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -enabler = ["pytest-enabler (>=2.2)"] -perf = ["ipython"] -test = ["flufl.flake8", "importlib-resources (>=1.3) ; python_version < \"3.9\"", "jaraco.test (>=5.4)", "packaging", 
"pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] -type = ["pytest-mypy"] - -[[package]] -name = "jinja2" -version = "3.1.5" -description = "A very fast and expressive template engine." -optional = false -python-versions = ">=3.7" -groups = ["main"] -files = [ - {file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"}, - {file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"}, -] - -[package.dependencies] -MarkupSafe = ">=2.0" - -[package.extras] -i18n = ["Babel (>=2.7)"] - -[[package]] -name = "markdown" -version = "3.7" -description = "Python implementation of John Gruber's Markdown." -optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "Markdown-3.7-py3-none-any.whl", hash = "sha256:7eb6df5690b81a1d7942992c97fad2938e956e79df20cbc6186e9c3a77b1c803"}, - {file = "markdown-3.7.tar.gz", hash = "sha256:2ae2471477cfd02dbbf038d5d9bc226d40def84b4fe2986e49b59b6b472bbed2"}, -] - -[package.dependencies] -importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} - -[package.extras] -docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.5)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"] -testing = ["coverage", "pyyaml"] - -[[package]] -name = "markupsafe" -version = "2.1.5" -description = "Safely add untrusted strings to HTML/XML markup." -optional = false -python-versions = ">=3.7" -groups = ["main"] -files = [ - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = 
"sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = 
"sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, - {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, -] - -[[package]] -name = "mergedeep" -version = "1.3.4" -description = "A deep merge function for 🐍." -optional = false -python-versions = ">=3.6" -groups = ["main"] -files = [ - {file = "mergedeep-1.3.4-py3-none-any.whl", hash = "sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307"}, - {file = "mergedeep-1.3.4.tar.gz", hash = "sha256:0096d52e9dad9939c3d975a774666af186eda617e6ca84df4c94dec30004f2a8"}, -] - -[[package]] -name = "mkdocs" -version = "1.5.3" -description = "Project documentation with Markdown." -optional = false -python-versions = ">=3.7" -groups = ["main"] -files = [ - {file = "mkdocs-1.5.3-py3-none-any.whl", hash = "sha256:3b3a78e736b31158d64dbb2f8ba29bd46a379d0c6e324c2246c3bc3d2189cfc1"}, - {file = "mkdocs-1.5.3.tar.gz", hash = "sha256:eb7c99214dcb945313ba30426c2451b735992c73c2e10838f76d09e39ff4d0e2"}, -] - -[package.dependencies] -click = ">=7.0" -colorama = {version = ">=0.4", markers = "platform_system == \"Windows\""} -ghp-import = ">=1.0" -importlib-metadata = {version = ">=4.3", markers = "python_version < \"3.10\""} -jinja2 = ">=2.11.1" -markdown = ">=3.2.1" -markupsafe = ">=2.0.1" -mergedeep = ">=1.3.4" -packaging = ">=20.5" -pathspec = ">=0.11.1" -platformdirs = ">=2.2.0" -pyyaml = ">=5.1" -pyyaml-env-tag = ">=0.1" -watchdog = ">=2.0" - -[package.extras] -i18n = ["babel (>=2.9.0)"] -min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4) ; platform_system == \"Windows\"", "ghp-import (==1.0)", "importlib-metadata (==4.3) ; python_version < \"3.10\"", "jinja2 (==2.11.1)", "markdown (==3.2.1)", "markupsafe (==2.0.1)", "mergedeep (==1.3.4)", "packaging (==20.5)", "pathspec (==0.11.1)", "platformdirs (==2.2.0)", "pyyaml (==5.1)", "pyyaml-env-tag (==0.1)", "typing-extensions (==3.10) ; python_version < \"3.8\"", "watchdog (==2.0)"] - -[[package]] -name = "mkdocs-autorefs" -version = "1.2.0" -description = "Automatically link across pages in MkDocs." -optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "mkdocs_autorefs-1.2.0-py3-none-any.whl", hash = "sha256:d588754ae89bd0ced0c70c06f58566a4ee43471eeeee5202427da7de9ef85a2f"}, - {file = "mkdocs_autorefs-1.2.0.tar.gz", hash = "sha256:a86b93abff653521bda71cf3fc5596342b7a23982093915cb74273f67522190f"}, -] - -[package.dependencies] -Markdown = ">=3.3" -markupsafe = ">=2.0.1" -mkdocs = ">=1.1" - -[[package]] -name = "mkdocs-coverage" -version = "1.0.0" -description = "MkDocs plugin to integrate your coverage HTML report into your site." 
-optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "mkdocs_coverage-1.0.0-py3-none-any.whl", hash = "sha256:29e3c0fc89d1107ee22a0fbe1a9aa927550f97cf2e2682fcad5672ca3f2e14dd"}, - {file = "mkdocs_coverage-1.0.0.tar.gz", hash = "sha256:3ebe0837c29458f64843c45cbb6618859634326867056d030621ab164649904c"}, -] - -[package.dependencies] -mkdocs = ">=1.2" - -[[package]] -name = "mkdocs-gen-files" -version = "0.5.0" -description = "MkDocs plugin to programmatically generate documentation pages during the build" -optional = false -python-versions = ">=3.7" -groups = ["main"] -files = [ - {file = "mkdocs_gen_files-0.5.0-py3-none-any.whl", hash = "sha256:7ac060096f3f40bd19039e7277dd3050be9a453c8ac578645844d4d91d7978ea"}, - {file = "mkdocs_gen_files-0.5.0.tar.gz", hash = "sha256:4c7cf256b5d67062a788f6b1d035e157fc1a9498c2399be9af5257d4ff4d19bc"}, -] - -[package.dependencies] -mkdocs = ">=1.0.3" - -[[package]] -name = "mkdocs-literate-nav" -version = "0.6.1" -description = "MkDocs plugin to specify the navigation in Markdown instead of YAML" -optional = false -python-versions = ">=3.7" -groups = ["main"] -files = [ - {file = "mkdocs_literate_nav-0.6.1-py3-none-any.whl", hash = "sha256:e70bdc4a07050d32da79c0b697bd88e9a104cf3294282e9cb20eec94c6b0f401"}, - {file = "mkdocs_literate_nav-0.6.1.tar.gz", hash = "sha256:78a7ab6d878371728acb0cdc6235c9b0ffc6e83c997b037f4a5c6ff7cef7d759"}, -] - -[package.dependencies] -mkdocs = ">=1.0.3" - -[[package]] -name = "mkdocs-material" -version = "9.5.18" -description = "Documentation that simply works" -optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "mkdocs_material-9.5.18-py3-none-any.whl", hash = "sha256:1e0e27fc9fe239f9064318acf548771a4629d5fd5dfd45444fd80a953fe21eb4"}, - {file = "mkdocs_material-9.5.18.tar.gz", hash = "sha256:a43f470947053fa2405c33995f282d24992c752a50114f23f30da9d8d0c57e62"}, -] - -[package.dependencies] -babel = ">=2.10,<3.0" -colorama = ">=0.4,<1.0" -jinja2 = ">=3.0,<4.0" -markdown = ">=3.2,<4.0" -mkdocs = ">=1.5.3,<1.6.0" -mkdocs-material-extensions = ">=1.3,<2.0" -paginate = ">=0.5,<1.0" -pygments = ">=2.16,<3.0" -pymdown-extensions = ">=10.2,<11.0" -regex = ">=2022.4" -requests = ">=2.26,<3.0" - -[package.extras] -git = ["mkdocs-git-committers-plugin-2 (>=1.1,<2.0)", "mkdocs-git-revision-date-localized-plugin (>=1.2.4,<2.0)"] -imaging = ["cairosvg (>=2.6,<3.0)", "pillow (>=10.2,<11.0)"] -recommended = ["mkdocs-minify-plugin (>=0.7,<1.0)", "mkdocs-redirects (>=1.2,<2.0)", "mkdocs-rss-plugin (>=1.6,<2.0)"] - -[[package]] -name = "mkdocs-material-extensions" -version = "1.3.1" -description = "Extension pack for Python Markdown and MkDocs Material." -optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "mkdocs_material_extensions-1.3.1-py3-none-any.whl", hash = "sha256:adff8b62700b25cb77b53358dad940f3ef973dd6db797907c49e3c2ef3ab4e31"}, - {file = "mkdocs_material_extensions-1.3.1.tar.gz", hash = "sha256:10c9511cea88f568257f960358a467d12b970e1f7b2c0e5fb2bb48cab1928443"}, -] - -[[package]] -name = "mkdocs-render-swagger-plugin" -version = "0.1.2" -description = "MKDocs plugin for rendering swagger & openapi files." 
-optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "mkdocs_render_swagger_plugin-0.1.2-py3-none-any.whl", hash = "sha256:0340d45e12a0f1633a4b8af2eb99ea75469006ddf418ef0f1acd51ed1e4969e4"}, -] - -[package.dependencies] -mkdocs = ">=1.4" - -[package.extras] -dev = ["coverage", "flake8", "mypy", "pyyaml"] - -[[package]] -name = "mkdocs-section-index" -version = "0.3.9" -description = "MkDocs plugin to allow clickable sections that lead to an index page" -optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "mkdocs_section_index-0.3.9-py3-none-any.whl", hash = "sha256:5e5eb288e8d7984d36c11ead5533f376fdf23498f44e903929d72845b24dfe34"}, - {file = "mkdocs_section_index-0.3.9.tar.gz", hash = "sha256:b66128d19108beceb08b226ee1ba0981840d14baf8a652b6c59e650f3f92e4f8"}, -] - -[package.dependencies] -mkdocs = ">=1.2" - -[[package]] -name = "mkdocstrings" -version = "0.26.1" -description = "Automatic documentation from sources, for MkDocs." -optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "mkdocstrings-0.26.1-py3-none-any.whl", hash = "sha256:29738bfb72b4608e8e55cc50fb8a54f325dc7ebd2014e4e3881a49892d5983cf"}, - {file = "mkdocstrings-0.26.1.tar.gz", hash = "sha256:bb8b8854d6713d5348ad05b069a09f3b79edbc6a0f33a34c6821141adb03fe33"}, -] - -[package.dependencies] -click = ">=7.0" -importlib-metadata = {version = ">=4.6", markers = "python_version < \"3.10\""} -Jinja2 = ">=2.11.1" -Markdown = ">=3.6" -MarkupSafe = ">=1.1" -mkdocs = ">=1.4" -mkdocs-autorefs = ">=1.2" -mkdocstrings-python = {version = ">=0.5.2", optional = true, markers = "extra == \"python\""} -platformdirs = ">=2.2" -pymdown-extensions = ">=6.3" -typing-extensions = {version = ">=4.1", markers = "python_version < \"3.10\""} - -[package.extras] -crystal = ["mkdocstrings-crystal (>=0.3.4)"] -python = ["mkdocstrings-python (>=0.5.2)"] -python-legacy = ["mkdocstrings-python-legacy (>=0.2.1)"] - -[[package]] -name = "mkdocstrings-python" -version = "1.11.1" -description = "A Python handler for mkdocstrings." 
-optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "mkdocstrings_python-1.11.1-py3-none-any.whl", hash = "sha256:a21a1c05acef129a618517bb5aae3e33114f569b11588b1e7af3e9d4061a71af"}, - {file = "mkdocstrings_python-1.11.1.tar.gz", hash = "sha256:8824b115c5359304ab0b5378a91f6202324a849e1da907a3485b59208b797322"}, -] - -[package.dependencies] -griffe = ">=0.49" -mkdocs-autorefs = ">=1.2" -mkdocstrings = ">=0.26" - -[[package]] -name = "packaging" -version = "24.2" -description = "Core utilities for Python packages" -optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, - {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, -] - -[[package]] -name = "paginate" -version = "0.5.7" -description = "Divides large result sets into pages for easier browsing" -optional = false -python-versions = "*" -groups = ["main"] -files = [ - {file = "paginate-0.5.7-py2.py3-none-any.whl", hash = "sha256:b885e2af73abcf01d9559fd5216b57ef722f8c42affbb63942377668e35c7591"}, - {file = "paginate-0.5.7.tar.gz", hash = "sha256:22bd083ab41e1a8b4f3690544afb2c60c25e5c9a63a30fa2f483f6c60c8e5945"}, -] - -[package.extras] -dev = ["pytest", "tox"] -lint = ["black"] - -[[package]] -name = "pathspec" -version = "0.12.1" -description = "Utility library for gitignore style pattern matching of file paths." -optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, - {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, -] - -[[package]] -name = "platformdirs" -version = "4.3.6" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." -optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, - {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, -] - -[package.extras] -docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] -type = ["mypy (>=1.11.2)"] - -[[package]] -name = "pygments" -version = "2.18.0" -description = "Pygments is a syntax highlighting package written in Python." -optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, - {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, -] - -[package.extras] -windows-terminal = ["colorama (>=0.4.6)"] - -[[package]] -name = "pymdown-extensions" -version = "10.12" -description = "Extension pack for Python Markdown." 
-optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "pymdown_extensions-10.12-py3-none-any.whl", hash = "sha256:49f81412242d3527b8b4967b990df395c89563043bc51a3d2d7d500e52123b77"}, - {file = "pymdown_extensions-10.12.tar.gz", hash = "sha256:b0ee1e0b2bef1071a47891ab17003bfe5bf824a398e13f49f8ed653b699369a7"}, -] - -[package.dependencies] -markdown = ">=3.6" -pyyaml = "*" - -[package.extras] -extra = ["pygments (>=2.12)"] - -[[package]] -name = "python-dateutil" -version = "2.9.0.post0" -description = "Extensions to the standard Python datetime module" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -groups = ["main"] -files = [ - {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, - {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, -] - -[package.dependencies] -six = ">=1.5" - -[[package]] -name = "pytz" -version = "2024.2" -description = "World timezone definitions, modern and historical" -optional = false -python-versions = "*" -groups = ["main"] -markers = "python_version < \"3.9\"" -files = [ - {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, - {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, -] - -[[package]] -name = "pyyaml" -version = "6.0.2" -description = "YAML parser and emitter for Python" -optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, - {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, - {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, - {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, - {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, - {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, - {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, - {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, - {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, - {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, - {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, - {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, - {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, - {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, - {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, - {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, - {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, - {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, - {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, - {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, - {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, - {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, - {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, - {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, - {file = 
"PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, - {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, - {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, - {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, - {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, - {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, - {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, - {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, - {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, - {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, - {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, -] - -[[package]] -name = "pyyaml-env-tag" -version = "0.1" -description = "A custom YAML tag for referencing environment variables in YAML files. " -optional = false -python-versions = ">=3.6" -groups = ["main"] -files = [ - {file = "pyyaml_env_tag-0.1-py3-none-any.whl", hash = "sha256:af31106dec8a4d68c60207c1886031cbf839b68aa7abccdb19868200532c2069"}, - {file = "pyyaml_env_tag-0.1.tar.gz", hash = "sha256:70092675bda14fdec33b31ba77e7543de9ddc88f2e5b99160396572d11525bdb"}, -] - -[package.dependencies] -pyyaml = "*" - -[[package]] -name = "regex" -version = "2024.11.6" -description = "Alternative regular expression module, to replace re." 
-optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff590880083d60acc0433f9c3f713c51f7ac6ebb9adf889c79a261ecf541aa91"}, - {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:658f90550f38270639e83ce492f27d2c8d2cd63805c65a13a14d36ca126753f0"}, - {file = "regex-2024.11.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:164d8b7b3b4bcb2068b97428060b2a53be050085ef94eca7f240e7947f1b080e"}, - {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3660c82f209655a06b587d55e723f0b813d3a7db2e32e5e7dc64ac2a9e86fde"}, - {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d22326fcdef5e08c154280b71163ced384b428343ae16a5ab2b3354aed12436e"}, - {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1ac758ef6aebfc8943560194e9fd0fa18bcb34d89fd8bd2af18183afd8da3a2"}, - {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:997d6a487ff00807ba810e0f8332c18b4eb8d29463cfb7c820dc4b6e7562d0cf"}, - {file = "regex-2024.11.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:02a02d2bb04fec86ad61f3ea7f49c015a0681bf76abb9857f945d26159d2968c"}, - {file = "regex-2024.11.6-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f02f93b92358ee3f78660e43b4b0091229260c5d5c408d17d60bf26b6c900e86"}, - {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:06eb1be98df10e81ebaded73fcd51989dcf534e3c753466e4b60c4697a003b67"}, - {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:040df6fe1a5504eb0f04f048e6d09cd7c7110fef851d7c567a6b6e09942feb7d"}, - {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fdabbfc59f2c6edba2a6622c647b716e34e8e3867e0ab975412c5c2f79b82da2"}, - {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8447d2d39b5abe381419319f942de20b7ecd60ce86f16a23b0698f22e1b70008"}, - {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:da8f5fc57d1933de22a9e23eec290a0d8a5927a5370d24bda9a6abe50683fe62"}, - {file = "regex-2024.11.6-cp310-cp310-win32.whl", hash = "sha256:b489578720afb782f6ccf2840920f3a32e31ba28a4b162e13900c3e6bd3f930e"}, - {file = "regex-2024.11.6-cp310-cp310-win_amd64.whl", hash = "sha256:5071b2093e793357c9d8b2929dfc13ac5f0a6c650559503bb81189d0a3814519"}, - {file = "regex-2024.11.6-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5478c6962ad548b54a591778e93cd7c456a7a29f8eca9c49e4f9a806dcc5d638"}, - {file = "regex-2024.11.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c89a8cc122b25ce6945f0423dc1352cb9593c68abd19223eebbd4e56612c5b7"}, - {file = "regex-2024.11.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:94d87b689cdd831934fa3ce16cc15cd65748e6d689f5d2b8f4f4df2065c9fa20"}, - {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1062b39a0a2b75a9c694f7a08e7183a80c63c0d62b301418ffd9c35f55aaa114"}, - {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:167ed4852351d8a750da48712c3930b031f6efdaa0f22fa1933716bfcd6bf4a3"}, - {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:2d548dafee61f06ebdb584080621f3e0c23fff312f0de1afc776e2a2ba99a74f"}, - {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a19f302cd1ce5dd01a9099aaa19cae6173306d1302a43b627f62e21cf18ac0"}, - {file = "regex-2024.11.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bec9931dfb61ddd8ef2ebc05646293812cb6b16b60cf7c9511a832b6f1854b55"}, - {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9714398225f299aa85267fd222f7142fcb5c769e73d7733344efc46f2ef5cf89"}, - {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:202eb32e89f60fc147a41e55cb086db2a3f8cb82f9a9a88440dcfc5d37faae8d"}, - {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:4181b814e56078e9b00427ca358ec44333765f5ca1b45597ec7446d3a1ef6e34"}, - {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:068376da5a7e4da51968ce4c122a7cd31afaaec4fccc7856c92f63876e57b51d"}, - {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ac10f2c4184420d881a3475fb2c6f4d95d53a8d50209a2500723d831036f7c45"}, - {file = "regex-2024.11.6-cp311-cp311-win32.whl", hash = "sha256:c36f9b6f5f8649bb251a5f3f66564438977b7ef8386a52460ae77e6070d309d9"}, - {file = "regex-2024.11.6-cp311-cp311-win_amd64.whl", hash = "sha256:02e28184be537f0e75c1f9b2f8847dc51e08e6e171c6bde130b2687e0c33cf60"}, - {file = "regex-2024.11.6-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:52fb28f528778f184f870b7cf8f225f5eef0a8f6e3778529bdd40c7b3920796a"}, - {file = "regex-2024.11.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdd6028445d2460f33136c55eeb1f601ab06d74cb3347132e1c24250187500d9"}, - {file = "regex-2024.11.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:805e6b60c54bf766b251e94526ebad60b7de0c70f70a4e6210ee2891acb70bf2"}, - {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b85c2530be953a890eaffde05485238f07029600e8f098cdf1848d414a8b45e4"}, - {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb26437975da7dc36b7efad18aa9dd4ea569d2357ae6b783bf1118dabd9ea577"}, - {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:abfa5080c374a76a251ba60683242bc17eeb2c9818d0d30117b4486be10c59d3"}, - {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b7fa6606c2881c1db9479b0eaa11ed5dfa11c8d60a474ff0e095099f39d98e"}, - {file = "regex-2024.11.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c32f75920cf99fe6b6c539c399a4a128452eaf1af27f39bce8909c9a3fd8cbe"}, - {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:982e6d21414e78e1f51cf595d7f321dcd14de1f2881c5dc6a6e23bbbbd68435e"}, - {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a7c2155f790e2fb448faed6dd241386719802296ec588a8b9051c1f5c481bc29"}, - {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:149f5008d286636e48cd0b1dd65018548944e495b0265b45e1bffecce1ef7f39"}, - {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:e5364a4502efca094731680e80009632ad6624084aff9a23ce8c8c6820de3e51"}, - {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0a86e7eeca091c09e021db8eb72d54751e527fa47b8d5787caf96d9831bd02ad"}, - {file = 
"regex-2024.11.6-cp312-cp312-win32.whl", hash = "sha256:32f9a4c643baad4efa81d549c2aadefaeba12249b2adc5af541759237eee1c54"}, - {file = "regex-2024.11.6-cp312-cp312-win_amd64.whl", hash = "sha256:a93c194e2df18f7d264092dc8539b8ffb86b45b899ab976aa15d48214138e81b"}, - {file = "regex-2024.11.6-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a6ba92c0bcdf96cbf43a12c717eae4bc98325ca3730f6b130ffa2e3c3c723d84"}, - {file = "regex-2024.11.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:525eab0b789891ac3be914d36893bdf972d483fe66551f79d3e27146191a37d4"}, - {file = "regex-2024.11.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:086a27a0b4ca227941700e0b31425e7a28ef1ae8e5e05a33826e17e47fbfdba0"}, - {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bde01f35767c4a7899b7eb6e823b125a64de314a8ee9791367c9a34d56af18d0"}, - {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b583904576650166b3d920d2bcce13971f6f9e9a396c673187f49811b2769dc7"}, - {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c4de13f06a0d54fa0d5ab1b7138bfa0d883220965a29616e3ea61b35d5f5fc7"}, - {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3cde6e9f2580eb1665965ce9bf17ff4952f34f5b126beb509fee8f4e994f143c"}, - {file = "regex-2024.11.6-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0d7f453dca13f40a02b79636a339c5b62b670141e63efd511d3f8f73fba162b3"}, - {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:59dfe1ed21aea057a65c6b586afd2a945de04fc7db3de0a6e3ed5397ad491b07"}, - {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b97c1e0bd37c5cd7902e65f410779d39eeda155800b65fc4d04cc432efa9bc6e"}, - {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f9d1e379028e0fc2ae3654bac3cbbef81bf3fd571272a42d56c24007979bafb6"}, - {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:13291b39131e2d002a7940fb176e120bec5145f3aeb7621be6534e46251912c4"}, - {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4f51f88c126370dcec4908576c5a627220da6c09d0bff31cfa89f2523843316d"}, - {file = "regex-2024.11.6-cp313-cp313-win32.whl", hash = "sha256:63b13cfd72e9601125027202cad74995ab26921d8cd935c25f09c630436348ff"}, - {file = "regex-2024.11.6-cp313-cp313-win_amd64.whl", hash = "sha256:2b3361af3198667e99927da8b84c1b010752fa4b1115ee30beaa332cabc3ef1a"}, - {file = "regex-2024.11.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:3a51ccc315653ba012774efca4f23d1d2a8a8f278a6072e29c7147eee7da446b"}, - {file = "regex-2024.11.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ad182d02e40de7459b73155deb8996bbd8e96852267879396fb274e8700190e3"}, - {file = "regex-2024.11.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba9b72e5643641b7d41fa1f6d5abda2c9a263ae835b917348fc3c928182ad467"}, - {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40291b1b89ca6ad8d3f2b82782cc33807f1406cf68c8d440861da6304d8ffbbd"}, - {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cdf58d0e516ee426a48f7b2c03a332a4114420716d55769ff7108c37a09951bf"}, - {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:a36fdf2af13c2b14738f6e973aba563623cb77d753bbbd8d414d18bfaa3105dd"}, - {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1cee317bfc014c2419a76bcc87f071405e3966da434e03e13beb45f8aced1a6"}, - {file = "regex-2024.11.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50153825ee016b91549962f970d6a4442fa106832e14c918acd1c8e479916c4f"}, - {file = "regex-2024.11.6-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ea1bfda2f7162605f6e8178223576856b3d791109f15ea99a9f95c16a7636fb5"}, - {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:df951c5f4a1b1910f1a99ff42c473ff60f8225baa1cdd3539fe2819d9543e9df"}, - {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:072623554418a9911446278f16ecb398fb3b540147a7828c06e2011fa531e773"}, - {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:f654882311409afb1d780b940234208a252322c24a93b442ca714d119e68086c"}, - {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:89d75e7293d2b3e674db7d4d9b1bee7f8f3d1609428e293771d1a962617150cc"}, - {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:f65557897fc977a44ab205ea871b690adaef6b9da6afda4790a2484b04293a5f"}, - {file = "regex-2024.11.6-cp38-cp38-win32.whl", hash = "sha256:6f44ec28b1f858c98d3036ad5d7d0bfc568bdd7a74f9c24e25f41ef1ebfd81a4"}, - {file = "regex-2024.11.6-cp38-cp38-win_amd64.whl", hash = "sha256:bb8f74f2f10dbf13a0be8de623ba4f9491faf58c24064f32b65679b021ed0001"}, - {file = "regex-2024.11.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5704e174f8ccab2026bd2f1ab6c510345ae8eac818b613d7d73e785f1310f839"}, - {file = "regex-2024.11.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:220902c3c5cc6af55d4fe19ead504de80eb91f786dc102fbd74894b1551f095e"}, - {file = "regex-2024.11.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e7e351589da0850c125f1600a4c4ba3c722efefe16b297de54300f08d734fbf"}, - {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5056b185ca113c88e18223183aa1a50e66507769c9640a6ff75859619d73957b"}, - {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e34b51b650b23ed3354b5a07aab37034d9f923db2a40519139af34f485f77d0"}, - {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5670bce7b200273eee1840ef307bfa07cda90b38ae56e9a6ebcc9f50da9c469b"}, - {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:08986dce1339bc932923e7d1232ce9881499a0e02925f7402fb7c982515419ef"}, - {file = "regex-2024.11.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93c0b12d3d3bc25af4ebbf38f9ee780a487e8bf6954c115b9f015822d3bb8e48"}, - {file = "regex-2024.11.6-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:764e71f22ab3b305e7f4c21f1a97e1526a25ebdd22513e251cf376760213da13"}, - {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f056bf21105c2515c32372bbc057f43eb02aae2fda61052e2f7622c801f0b4e2"}, - {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:69ab78f848845569401469da20df3e081e6b5a11cb086de3eed1d48f5ed57c95"}, - {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = 
"sha256:86fddba590aad9208e2fa8b43b4c098bb0ec74f15718bb6a704e3c63e2cef3e9"}, - {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:684d7a212682996d21ca12ef3c17353c021fe9de6049e19ac8481ec35574a70f"}, - {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a03e02f48cd1abbd9f3b7e3586d97c8f7a9721c436f51a5245b3b9483044480b"}, - {file = "regex-2024.11.6-cp39-cp39-win32.whl", hash = "sha256:41758407fc32d5c3c5de163888068cfee69cb4c2be844e7ac517a52770f9af57"}, - {file = "regex-2024.11.6-cp39-cp39-win_amd64.whl", hash = "sha256:b2837718570f95dd41675328e111345f9b7095d821bac435aac173ac80b19983"}, - {file = "regex-2024.11.6.tar.gz", hash = "sha256:7ab159b063c52a0333c884e4679f8d7a85112ee3078fe3d9004b2dd875585519"}, -] - -[[package]] -name = "requests" -version = "2.32.4" -description = "Python HTTP for Humans." -optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c"}, - {file = "requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422"}, -] - -[package.dependencies] -certifi = ">=2017.4.17" -charset_normalizer = ">=2,<4" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<3" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] - -[[package]] -name = "six" -version = "1.16.0" -description = "Python 2 and 3 compatibility utilities" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -groups = ["main"] -files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] - -[[package]] -name = "typing-extensions" -version = "4.12.2" -description = "Backported and Experimental Type Hints for Python 3.8+" -optional = false -python-versions = ">=3.8" -groups = ["main"] -markers = "python_version < \"3.10\"" -files = [ - {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, - {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, -] - -[[package]] -name = "urllib3" -version = "2.2.3" -description = "HTTP library with thread-safe connection pooling, file post, and more." 
-optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, - {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, -] - -[package.extras] -brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] -h2 = ["h2 (>=4,<5)"] -socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] -zstd = ["zstandard (>=0.18.0)"] - -[[package]] -name = "watchdog" -version = "4.0.2" -description = "Filesystem events monitoring" -optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "watchdog-4.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ede7f010f2239b97cc79e6cb3c249e72962404ae3865860855d5cbe708b0fd22"}, - {file = "watchdog-4.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a2cffa171445b0efa0726c561eca9a27d00a1f2b83846dbd5a4f639c4f8ca8e1"}, - {file = "watchdog-4.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c50f148b31b03fbadd6d0b5980e38b558046b127dc483e5e4505fcef250f9503"}, - {file = "watchdog-4.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7c7d4bf585ad501c5f6c980e7be9c4f15604c7cc150e942d82083b31a7548930"}, - {file = "watchdog-4.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:914285126ad0b6eb2258bbbcb7b288d9dfd655ae88fa28945be05a7b475a800b"}, - {file = "watchdog-4.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:984306dc4720da5498b16fc037b36ac443816125a3705dfde4fd90652d8028ef"}, - {file = "watchdog-4.0.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1cdcfd8142f604630deef34722d695fb455d04ab7cfe9963055df1fc69e6727a"}, - {file = "watchdog-4.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d7ab624ff2f663f98cd03c8b7eedc09375a911794dfea6bf2a359fcc266bff29"}, - {file = "watchdog-4.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:132937547a716027bd5714383dfc40dc66c26769f1ce8a72a859d6a48f371f3a"}, - {file = "watchdog-4.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:cd67c7df93eb58f360c43802acc945fa8da70c675b6fa37a241e17ca698ca49b"}, - {file = "watchdog-4.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:bcfd02377be80ef3b6bc4ce481ef3959640458d6feaae0bd43dd90a43da90a7d"}, - {file = "watchdog-4.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:980b71510f59c884d684b3663d46e7a14b457c9611c481e5cef08f4dd022eed7"}, - {file = "watchdog-4.0.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:aa160781cafff2719b663c8a506156e9289d111d80f3387cf3af49cedee1f040"}, - {file = "watchdog-4.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f6ee8dedd255087bc7fe82adf046f0b75479b989185fb0bdf9a98b612170eac7"}, - {file = "watchdog-4.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0b4359067d30d5b864e09c8597b112fe0a0a59321a0f331498b013fb097406b4"}, - {file = "watchdog-4.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:770eef5372f146997638d737c9a3c597a3b41037cfbc5c41538fc27c09c3a3f9"}, - {file = "watchdog-4.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:eeea812f38536a0aa859972d50c76e37f4456474b02bd93674d1947cf1e39578"}, - {file = "watchdog-4.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b2c45f6e1e57ebb4687690c05bc3a2c1fb6ab260550c4290b8abb1335e0fd08b"}, - {file = "watchdog-4.0.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = 
"sha256:10b6683df70d340ac3279eff0b2766813f00f35a1d37515d2c99959ada8f05fa"}, - {file = "watchdog-4.0.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:f7c739888c20f99824f7aa9d31ac8a97353e22d0c0e54703a547a218f6637eb3"}, - {file = "watchdog-4.0.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c100d09ac72a8a08ddbf0629ddfa0b8ee41740f9051429baa8e31bb903ad7508"}, - {file = "watchdog-4.0.2-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:f5315a8c8dd6dd9425b974515081fc0aadca1d1d61e078d2246509fd756141ee"}, - {file = "watchdog-4.0.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:2d468028a77b42cc685ed694a7a550a8d1771bb05193ba7b24006b8241a571a1"}, - {file = "watchdog-4.0.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:f15edcae3830ff20e55d1f4e743e92970c847bcddc8b7509bcd172aa04de506e"}, - {file = "watchdog-4.0.2-py3-none-manylinux2014_aarch64.whl", hash = "sha256:936acba76d636f70db8f3c66e76aa6cb5136a936fc2a5088b9ce1c7a3508fc83"}, - {file = "watchdog-4.0.2-py3-none-manylinux2014_armv7l.whl", hash = "sha256:e252f8ca942a870f38cf785aef420285431311652d871409a64e2a0a52a2174c"}, - {file = "watchdog-4.0.2-py3-none-manylinux2014_i686.whl", hash = "sha256:0e83619a2d5d436a7e58a1aea957a3c1ccbf9782c43c0b4fed80580e5e4acd1a"}, - {file = "watchdog-4.0.2-py3-none-manylinux2014_ppc64.whl", hash = "sha256:88456d65f207b39f1981bf772e473799fcdc10801062c36fd5ad9f9d1d463a73"}, - {file = "watchdog-4.0.2-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:32be97f3b75693a93c683787a87a0dc8db98bb84701539954eef991fb35f5fbc"}, - {file = "watchdog-4.0.2-py3-none-manylinux2014_s390x.whl", hash = "sha256:c82253cfc9be68e3e49282831afad2c1f6593af80c0daf1287f6a92657986757"}, - {file = "watchdog-4.0.2-py3-none-manylinux2014_x86_64.whl", hash = "sha256:c0b14488bd336c5b1845cee83d3e631a1f8b4e9c5091ec539406e4a324f882d8"}, - {file = "watchdog-4.0.2-py3-none-win32.whl", hash = "sha256:0d8a7e523ef03757a5aa29f591437d64d0d894635f8a50f370fe37f913ce4e19"}, - {file = "watchdog-4.0.2-py3-none-win_amd64.whl", hash = "sha256:c344453ef3bf875a535b0488e3ad28e341adbd5a9ffb0f7d62cefacc8824ef2b"}, - {file = "watchdog-4.0.2-py3-none-win_ia64.whl", hash = "sha256:baececaa8edff42cd16558a639a9b0ddf425f93d892e8392a56bf904f5eff22c"}, - {file = "watchdog-4.0.2.tar.gz", hash = "sha256:b4dfbb6c49221be4535623ea4474a4d6ee0a9cef4a80b20c28db4d858b64e270"}, -] - -[package.extras] -watchmedo = ["PyYAML (>=3.10)"] - -[[package]] -name = "wheel" -version = "0.45.0" -description = "A built-package format for Python" -optional = false -python-versions = ">=3.8" -groups = ["main"] -markers = "python_version < \"3.9\"" -files = [ - {file = "wheel-0.45.0-py3-none-any.whl", hash = "sha256:52f0baa5e6522155090a09c6bd95718cc46956d1b51d537ea5454249edb671c7"}, - {file = "wheel-0.45.0.tar.gz", hash = "sha256:a57353941a3183b3d5365346b567a260a0602a0f8a635926a7dede41b94c674a"}, -] - -[package.extras] -test = ["pytest (>=6.0.0)", "setuptools (>=65)"] - -[[package]] -name = "zipp" -version = "3.20.2" -description = "Backport of pathlib-compatible object wrapper for zip files" -optional = false -python-versions = ">=3.8" -groups = ["main"] -markers = "python_version < \"3.10\"" -files = [ - {file = "zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350"}, - {file = "zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29"}, -] - -[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] -cover = 
["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -enabler = ["pytest-enabler (>=2.2)"] -test = ["big-O", "importlib-resources ; python_version < \"3.9\"", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] -type = ["pytest-mypy"] - -[metadata] -lock-version = "2.1" -python-versions = ">=3.8,<3.13" -content-hash = "e3e0642693d670700459b42e2af5868efc11d24276fbb4e099b304c4b96f1759" diff --git a/public/favicon.svg b/public/favicon.svg new file mode 100644 index 0000000..9929fa5 --- /dev/null +++ b/public/favicon.svg @@ -0,0 +1,71 @@ + + + + diff --git a/pyproject.toml b/pyproject.toml deleted file mode 100644 index a5d6c91..0000000 --- a/pyproject.toml +++ /dev/null @@ -1,18 +0,0 @@ -[tool.poetry] -name = "yaptide developer documentation" -version = "1.0.0" -description = "Yaptide Yet Another Particle Transport IDE" -package-mode = false -authors = [] -readme = "README.md" - -[tool.poetry.dependencies] -python = ">=3.8,<3.13" -mkdocs = "1.5.3" -mkdocs-coverage = "1.0.0" -mkdocstrings = {version = ">=0.18", extras = ["python"]} -mkdocs-material = "9.5.18" -mkdocs-gen-files = "0.5.0" -mkdocs-literate-nav = "0.6.1" -mkdocs-section-index = "0.3.9" -mkdocs-render-swagger-plugin = "0.1.2" \ No newline at end of file diff --git a/src/assets/yaptide-logo.svg b/src/assets/yaptide-logo.svg new file mode 100644 index 0000000..9929fa5 --- /dev/null +++ b/src/assets/yaptide-logo.svg @@ -0,0 +1,71 @@ + + + + diff --git a/src/content.config.ts b/src/content.config.ts new file mode 100644 index 0000000..d9ee8c9 --- /dev/null +++ b/src/content.config.ts @@ -0,0 +1,7 @@ +import { defineCollection } from 'astro:content'; +import { docsLoader } from '@astrojs/starlight/loaders'; +import { docsSchema } from '@astrojs/starlight/schema'; + +export const collections = { + docs: defineCollection({ loader: docsLoader(), schema: docsSchema() }), +}; diff --git a/src/content/docs/api-reference/auth.md b/src/content/docs/api-reference/auth.md new file mode 100644 index 0000000..ec33323 --- /dev/null +++ b/src/content/docs/api-reference/auth.md @@ -0,0 +1,214 @@ +--- +title: Auth Endpoints +description: Authentication and session management API. +--- + +## Register + +Create a new local user account. + +```http +PUT /auth/register +Content-Type: application/json + +{ + "username": "researcher", + "password": "secure-password" +} +``` + +**Response** `201 Created` + +```json +{ + "message": "User created", + "status_code": 201 +} +``` + +**Errors:** +- `400` — Missing username or password +- `403` — Registration is disabled on this instance + +--- + +## Login + +Authenticate with username and password. Sets HTTP-only cookies for `access_token` and `refresh_token`. + +```http +POST /auth/login +Content-Type: application/json + +{ + "username": "researcher", + "password": "secure-password" +} +``` + +**Response** `202 Accepted` + +```json +{ + "message": "Login successful", + "access_exp": "2024-01-15T12:30:00Z", + "refresh_exp": "2024-01-16T12:00:00Z" +} +``` + +**Response headers set:** +- `Set-Cookie: access_token=; HttpOnly; SameSite=None; Secure` +- `Set-Cookie: refresh_token=; HttpOnly; SameSite=None; Secure` + +**Errors:** +- `401` — Invalid credentials +- `400` — Missing fields + +--- + +## Refresh Token + +Exchange a valid refresh token for a new access token. 
+ +```http +GET /auth/refresh +Cookie: refresh_token= +``` + +**Response** `200 OK` + +```json +{ + "message": "Token refreshed", + "access_exp": "2024-01-15T12:45:00Z" +} +``` + +**Response headers set:** +- `Set-Cookie: access_token=; HttpOnly; SameSite=None; Secure` + +**Errors:** +- `401` — Invalid or expired refresh token + +--- + +## Status + +Get information about the currently authenticated user. + +```http +GET /auth/status +Cookie: access_token= +``` + +**Response** `200 OK` + +```json +{ + "message": "User status", + "username": "researcher", + "source": "local" +} +``` + +The `source` field indicates the authentication provider: `"local"` or `"keycloak"`. + +--- + +## Logout + +Delete authentication cookies and end the session. + +```http +DELETE /auth/logout +Cookie: access_token= +``` + +**Response** `200 OK` + +```json +{ + "message": "Logout successful" +} +``` + +**Response headers set:** +- `Set-Cookie: access_token=; Max-Age=0` +- `Set-Cookie: refresh_token=; Max-Age=0` + +--- + +## Keycloak Login + +Authenticate via Keycloak SSO. The frontend obtains a Keycloak token through the OIDC flow and sends it here. + +```http +POST /auth/keycloak +Content-Type: application/json + +{ + "keycloak_token": "" +} +``` + +**Response** `202 Accepted` + +```json +{ + "message": "Login successful", + "access_exp": "2024-01-15T12:30:00Z" +} +``` + +The backend: +1. Validates the Keycloak JWT against the configured realm. +2. Extracts user info from the token claims. +3. Creates or updates the user in the local database. +4. Fetches SSH certificates from the PLGrid proxy (if available). +5. Issues a backend JWT via `Set-Cookie`. + +**Errors:** +- `401` — Invalid Keycloak token +- `500` — Keycloak validation failed + +--- + +## Keycloak Logout + +```http +DELETE /auth/keycloak +Cookie: access_token= +``` + +**Response** `200 OK` + +```json +{ + "message": "Logout successful" +} +``` + +--- + +## Token Lifecycle + +``` +┌─────────────┐ POST /auth/login ┌─────────────────┐ +│ Client │ ──────────────────────────▶│ Backend │ +│ │ ◀────── Set-Cookie ────────│ (JWT issued) │ +│ │ └─────────────────┘ +│ │ GET /auth/status +│ │ ──── Cookie: access_token ──▶ +│ │ ◀──── 200 OK ────────────── +│ │ +│ │ GET /auth/refresh +│ (token │ ──── Cookie: refresh_token ─▶ +│ expiring) │ ◀──── new access_token ───── +│ │ +│ │ DELETE /auth/logout +│ │ ──── Cookie: access_token ──▶ +│ │ ◀──── cookies cleared ────── +└─────────────┘ +``` + +The frontend auto-refreshes the access token before expiry using `GET /auth/refresh`. diff --git a/src/content/docs/api-reference/jobs.md b/src/content/docs/api-reference/jobs.md new file mode 100644 index 0000000..c98b83a --- /dev/null +++ b/src/content/docs/api-reference/jobs.md @@ -0,0 +1,213 @@ +--- +title: Jobs Endpoints +description: Simulation submission, monitoring, and cancellation API. +--- + +YAPTIDE supports two execution backends: **direct** (Celery workers on the server) and **batch** (SLURM on HPC clusters). Both share the same request format. + +## Submit Direct Job + +Run a simulation using local Celery workers. + +```http +POST /jobs/direct +Cookie: access_token= +Content-Type: application/json + +{ + "sim_type": "shieldhit", + "ntasks": 10, + "input_type": "editor", + "sim_data": { ... 
} +} +``` + +**Parameters:** + +| Field | Type | Required | Description | +|---|---|---|---| +| `sim_type` | string | Yes | Simulator: `shieldhit`, `fluka`, `geant4`, `topas` | +| `ntasks` | integer | Yes | Number of parallel tasks (splits primaries) | +| `input_type` | string | Yes | `"editor"` (project JSON) or `"files"` (raw input files) | +| `sim_data` | object | Yes | Project JSON (when `input_type` is `"editor"`) | + +**Response** `202 Accepted` + +```json +{ + "message": "Job submitted", + "job_id": "abc123-def456" +} +``` + +**Errors:** +- `400` — Missing required fields or invalid sim_type +- `500` — Conversion or task dispatch failed + +--- + +## Get Direct Job Status + +```http +GET /jobs/direct?job_id=abc123-def456 +Cookie: access_token= +``` + +**Response** `200 OK` + +```json +{ + "message": "Job status", + "job_state": "RUNNING", + "job_tasks_status": [ + {"task_id": 1, "task_state": "COMPLETED", "simulated_primaries": 1000, "requested_primaries": 1000}, + {"task_id": 2, "task_state": "RUNNING", "simulated_primaries": 500, "requested_primaries": 1000} + ] +} +``` + +**Job states:** + +| State | Description | +|---|---| +| `UNKNOWN` | Job not found or not yet initialized | +| `PENDING` | Submitted, waiting for a worker | +| `RUNNING` | At least one task is executing | +| `MERGING_QUEUED` | All tasks done, waiting for result merge | +| `MERGING_RUNNING` | Results being merged | +| `COMPLETED` | All tasks finished successfully | +| `FAILED` | One or more tasks failed | +| `CANCELED` | Job was manually cancelled | + +--- + +## Cancel Direct Job + +```http +DELETE /jobs/direct?job_id=abc123-def456 +Cookie: access_token= +``` + +**Response** `200 OK` + +```json +{ + "message": "Job cancelled" +} +``` + +Revokes all pending Celery tasks and terminates running ones. + +--- + +## Submit Batch Job + +Submit a simulation to an HPC cluster via SLURM. **Requires Keycloak authentication.** + +```http +POST /jobs/batch +Cookie: access_token= +Content-Type: application/json + +{ + "sim_type": "shieldhit", + "ntasks": 100, + "input_type": "editor", + "sim_data": { ... }, + "batch_options": { + "cluster_name": "prometheus", + "slurm_options": { + "time": "01:00:00", + "partition": "plgrid" + } + } +} +``` + +**Additional parameters:** + +| Field | Type | Required | Description | +|---|---|---|---| +| `batch_options` | object | No | SLURM cluster selection and resource options | +| `batch_options.cluster_name` | string | No | Target cluster name | +| `batch_options.slurm_options` | object | No | Custom SLURM headers (time, partition, etc.) | + +**Response** `202 Accepted` + +```json +{ + "message": "Batch job submitted", + "job_id": "batch-789xyz" +} +``` + +**Errors:** +- `403` — Not a Keycloak-authenticated user +- `500` — SSH connection or SLURM submission failed + +--- + +## Get Batch Job Status + +```http +GET /jobs/batch?job_id=batch-789xyz +Cookie: access_token= +``` + +Response format is identical to direct job status. + +--- + +## Cancel Batch Job + +```http +DELETE /jobs/batch?job_id=batch-789xyz +Cookie: access_token= +``` + +Sends a SLURM `scancel` command to the cluster. + +--- + +## Get Job Status (Database) + +Platform-agnostic endpoint that reads job status from the database (works for both direct and batch). 
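+A typical client polls one of the status endpoints until the job settles. A minimal polling helper might look like the sketch below (the deployment URL is a placeholder and the terminal states follow the job-state table above):
+
+```python
+import time
+
+import requests
+
+BASE_URL = "http://localhost"  # placeholder for your deployment
+TERMINAL_STATES = {"COMPLETED", "FAILED", "CANCELED"}
+
+def wait_for_job(session: requests.Session, job_id: str, interval: float = 5.0) -> str:
+    """Poll GET /jobs until the job reaches a terminal state."""
+    while True:
+        body = session.get(f"{BASE_URL}/jobs", params={"job_id": job_id}).json()
+        if body["job_state"] in TERMINAL_STATES:
+            return body["job_state"]
+        time.sleep(interval)
+```
+
+The request itself is a plain GET: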
+ +```http +GET /jobs?job_id=abc123-def456 +Cookie: access_token= +``` + +**Response** `200 OK` + +```json +{ + "message": "Job status", + "job_state": "COMPLETED", + "job_tasks_status": [ + {"task_id": 1, "task_state": "COMPLETED"}, + {"task_id": 2, "task_state": "COMPLETED"} + ] +} +``` + +> **Tip:** Use `GET /jobs/direct` or `GET /jobs/batch` during active simulation for real-time status. Use `GET /jobs` for historical lookups from the database. + +--- + +## Internal: Update Job State + +**Worker-facing only.** Called by Celery workers and batch helpers to report status changes. + +```http +POST /jobs +Content-Type: application/json + +{ + "sim_id": "abc123-def456", + "update_key": "", + "job_state": "RUNNING" +} +``` + +This endpoint is not user-facing — it uses `update_key` authentication instead of JWT cookies. diff --git a/src/content/docs/api-reference/overview.md b/src/content/docs/api-reference/overview.md new file mode 100644 index 0000000..a43fdb4 --- /dev/null +++ b/src/content/docs/api-reference/overview.md @@ -0,0 +1,107 @@ +--- +title: API Reference +description: Overview of the YAPTIDE REST API. +--- + +The YAPTIDE backend exposes a RESTful JSON API built with Flask-RESTful. All endpoints are registered in a single `initialize_routes()` function. + +## Base URL + +``` +https:/// +``` + +In a local Docker Compose setup: `http://localhost/` + +## Authentication + +Most endpoints require a valid JWT access token. The token is sent as an HTTP-only cookie named `access_token`, set automatically on login. + +Two authentication modes: + +| Mode | Login Endpoint | Token Source | +|---|---|---| +| **Native** | `POST /auth/login` | Backend-issued JWT | +| **Keycloak SSO** | `POST /auth/keycloak` | Keycloak JWT exchanged for backend JWT | + +Protected endpoints return `401 Unauthorized` if no valid token is present. + +## Endpoint Groups + +| Group | Prefix | Description | +|---|---|---| +| [Auth](/for_developers/api-reference/auth/) | `/auth/*` | Registration, login, logout, token refresh | +| [Jobs](/for_developers/api-reference/jobs/) | `/jobs/*` | Submit, monitor, and cancel simulations | +| [Results](/for_developers/api-reference/results/) | `/results`, `/estimators`, `/inputs`, `/logfiles` | Retrieve simulation output | +| [User](/for_developers/api-reference/user/) | `/user/*`, `/clusters` | User profile and cluster management | + +## Common Patterns + +### Response Format + +All responses return JSON. Successful responses include a `message` field: + +```json +{ + "message": "...", + ... +} +``` + +Error responses include `message` and often `status_code`: + +```json +{ + "message": "Description of what went wrong", + "status_code": 400 +} +``` + +### Query Parameters vs Body + +- **GET** and **DELETE** endpoints use query parameters. +- **POST** and **PUT** endpoints accept a JSON request body. + +### Internal Endpoints + +Some endpoints are **worker-facing only** — called by Celery simulation workers or batch polling helpers to report progress back to the backend: + +| Endpoint | Purpose | +|---|---| +| `POST /jobs` | Update job state | +| `POST /results` | Upload simulation results | +| `POST /logfiles` | Upload log files | +| `POST /tasks` | Update individual task state | + +These require an `update_key` (shared secret) rather than user authentication. 
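+To illustrate these conventions, a thin client helper can centralize the `message`/`status_code` handling in one place. This is a sketch, not an official client; the exception class and base URL are illustrative:
+
+```python
+import requests
+
+class YaptideApiError(RuntimeError):
+    """Raised when the API answers with a non-2xx response."""
+
+def call_api(session: requests.Session, method: str, path: str, **kwargs) -> dict:
+    """Send a request and apply the common response conventions."""
+    resp = session.request(method, f"http://localhost{path}", **kwargs)
+    body = resp.json()
+    if not resp.ok:
+        # Error responses carry a human-readable message, often with status_code
+        raise YaptideApiError(body.get("message", f"HTTP {resp.status_code}"))
+    return body
+```
+
+With such a wrapper, `call_api(session, "GET", "/auth/status")` either returns the parsed body or raises with the server's own error message.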
+ +## Quick Reference + +| Method | Path | Auth | Description | +|---|---|---|---| +| GET | `/` | No | Health check | +| PUT | `/auth/register` | No | Register user | +| POST | `/auth/login` | No | Log in | +| GET | `/auth/refresh` | Yes | Refresh token | +| GET | `/auth/status` | Yes | Current user info | +| DELETE | `/auth/logout` | Yes | Log out | +| POST | `/auth/keycloak` | No* | Keycloak login | +| DELETE | `/auth/keycloak` | Yes | Keycloak logout | +| POST | `/jobs/direct` | Yes | Submit direct job | +| GET | `/jobs/direct` | Yes | Get direct job status | +| DELETE | `/jobs/direct` | Yes | Cancel direct job | +| POST | `/jobs/batch` | Yes† | Submit batch job | +| GET | `/jobs/batch` | Yes† | Get batch job status | +| DELETE | `/jobs/batch` | Yes† | Cancel batch job | +| GET | `/jobs` | Yes | Get job status (DB) | +| GET | `/results` | Yes | Get results | +| GET | `/estimators` | Yes | Get estimator metadata | +| GET | `/inputs` | Yes | Get input config | +| GET | `/logfiles` | Yes | Get log files | +| GET | `/user/simulations` | Yes | List user's simulations | +| DELETE | `/user/simulations` | Yes | Delete a simulation | +| POST | `/user/update` | Yes | Update profile | +| GET | `/clusters` | Yes† | List HPC clusters | + +\* Requires a valid Keycloak token in the request body. +† Requires Keycloak authentication (PLGrid access). diff --git a/src/content/docs/api-reference/results.md b/src/content/docs/api-reference/results.md new file mode 100644 index 0000000..30c4bdf --- /dev/null +++ b/src/content/docs/api-reference/results.md @@ -0,0 +1,247 @@ +--- +title: Results Endpoints +description: Simulation results, estimators, inputs, and log file retrieval. +--- + +## Get Results + +Retrieve simulation results with optional filtering by estimator and page. + +```http +GET /results?job_id=abc123-def456 +Cookie: access_token= +``` + +**Query parameters:** + +| Parameter | Type | Required | Description | +|---|---|---|---| +| `job_id` | string | Yes | Simulation job ID | +| `estimator_name` | string | No | Filter by specific estimator name | +| `page_number` | integer | No | Single page number | +| `page_numbers` | string | No | Page range, e.g. `"1-3,5"` | + +### Get All Results + +```http +GET /results?job_id=abc123-def456 +``` + +**Response** `200 OK` + +```json +{ + "message": "Results", + "estimators": [ + { + "name": "Detector0", + "pages": [ + { + "page_number": 1, + "name": "Z (Dose)", + "data": { + "values": [0.0, 0.001, 0.015, ...], + "dimensions": [[0.0, 0.5, 1.0, ...]], + "unit": "Gy" + } + } + ] + } + ] +} +``` + +### Filter by Estimator + +```http +GET /results?job_id=abc123-def456&estimator_name=Detector0 +``` + +Returns only pages for the named estimator. + +### Filter by Page + +```http +GET /results?job_id=abc123-def456&page_numbers=1-3,5 +``` + +Returns only the specified pages across all estimators. + +> **Note:** Result data is stored compressed (zlib) in the database. The API decompresses transparently. + +--- + +## Get Estimator Metadata + +Lightweight endpoint that returns estimator names and page metadata **without** the full data arrays. 
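+This makes it cheap to enumerate what a job produced before deciding which pages to download, as in this sketch (the deployment URL is a placeholder):
+
+```python
+import requests
+
+BASE_URL = "http://localhost"  # placeholder for your deployment
+
+def list_pages(session: requests.Session, job_id: str) -> None:
+    """Print estimator and page names without fetching the data arrays."""
+    meta = session.get(f"{BASE_URL}/estimators", params={"job_id": job_id}).json()
+    for estimator in meta["estimators"]:
+        for page in estimator["pages"]:
+            print(estimator["name"], page["page_number"], page["name"])
+```
+
+The underlying request: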
+ +```http +GET /estimators?job_id=abc123-def456 +Cookie: access_token= +``` + +**Response** `200 OK` + +```json +{ + "message": "Estimators", + "estimators": [ + { + "name": "Detector0", + "pages": [ + { + "page_number": 1, + "name": "Z (Dose)", + "dimensions": [400] + }, + { + "page_number": 2, + "name": "Z (Fluence)", + "dimensions": [400] + } + ] + } + ] +} +``` + +Use this endpoint to build a results selector UI before fetching the full data. + +--- + +## Get Inputs + +Retrieve the input configuration that was used for a simulation. + +```http +GET /inputs?job_id=abc123-def456 +Cookie: access_token= +``` + +**Response** `200 OK` + +The response format depends on the `input_type` used during submission: + +### Editor Input + +```json +{ + "message": "Inputs", + "input_type": "editor", + "input": { + "project": { ... }, + "beam": { ... }, + "figureManager": { ... } + } +} +``` + +### File Input + +```json +{ + "message": "Inputs", + "input_type": "files", + "input": { + "beam.dat": "...", + "geo.dat": "...", + "mat.dat": "...", + "detect.dat": "..." + } +} +``` + +--- + +## Get Log Files + +Retrieve simulation log files (stdout/stderr from the simulator process). + +```http +GET /logfiles?job_id=abc123-def456 +Cookie: access_token= +``` + +**Response** `200 OK` + +```json +{ + "message": "Logfiles", + "logfiles": { + "task_1": "SHIELD-HIT12A version 1.0.0\nStarting simulation...\n...", + "task_2": "..." + } +} +``` + +Log files are useful for debugging failed simulations — they contain the simulator's own error messages. + +--- + +## Internal: Upload Results + +**Worker-facing only.** Simulation workers call this to save results to the database. + +```http +POST /results +Content-Type: application/json + +{ + "simulation_id": "abc123-def456", + "update_key": "", + "estimators": [ + { + "name": "Detector0", + "pages": [ + { + "page_number": 1, + "name": "Z (Dose)", + "data": { ... } + } + ] + } + ] +} +``` + +--- + +## Internal: Upload Log Files + +**Worker-facing only.** + +```http +POST /logfiles +Content-Type: application/json + +{ + "simulation_id": "abc123-def456", + "update_key": "", + "logfiles": { + "stdout": "...", + "stderr": "..." + } +} +``` + +--- + +## Internal: Update Task State + +**Worker-facing only.** Updates the state of an individual simulation task. + +```http +POST /tasks +Content-Type: application/json + +{ + "simulation_id": "abc123-def456", + "task_id": 1, + "update_key": "", + "update_dict": { + "task_state": "COMPLETED", + "simulated_primaries": 1000, + "requested_primaries": 1000 + } +} +``` diff --git a/src/content/docs/api-reference/user.md b/src/content/docs/api-reference/user.md new file mode 100644 index 0000000..e11c341 --- /dev/null +++ b/src/content/docs/api-reference/user.md @@ -0,0 +1,143 @@ +--- +title: User Endpoints +description: User profile management and cluster access API. +--- + +## List Simulations + +Get a paginated list of the authenticated user's simulations. 
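+A client can walk the full history by advancing `page_idx` until `page_count` is reached, e.g. (a sketch; parameter names and defaults follow the table below):
+
+```python
+import requests
+
+def iter_simulations(session: requests.Session, base_url: str, page_size: int = 10):
+    """Yield all of the user's simulations, one page at a time."""
+    page_idx = 1  # pages are 1-indexed
+    while True:
+        body = session.get(f"{base_url}/user/simulations",
+                           params={"page_size": page_size, "page_idx": page_idx}).json()
+        yield from body["simulations"]
+        if page_idx >= body["page_count"]:
+            return
+        page_idx += 1
+```
+
+A single page request looks like this: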
+ +```http +GET /user/simulations?page_size=10&page_idx=1 +Cookie: access_token= +``` + +**Query parameters:** + +| Parameter | Type | Default | Description | +|---|---|---|---| +| `page_size` | integer | 6 | Number of simulations per page | +| `page_idx` | integer | 1 | Page number (1-indexed) | +| `order_by` | string | `start_time` | Sort field: `start_time` or `end_time` | +| `order_type` | string | `descend` | Sort order: `ascend` or `descend` | +| `job_state` | string | — | Filter by state(s), comma-separated: `COMPLETED,FAILED` | + +**Response** `200 OK` + +```json +{ + "message": "User simulations", + "simulations": [ + { + "job_id": "abc123-def456", + "title": "Water phantom dose", + "start_time": "2024-01-15T10:00:00Z", + "end_time": "2024-01-15T10:05:23Z", + "job_state": "COMPLETED", + "sim_type": "shieldhit", + "input_type": "editor", + "platform": "direct", + "ntasks": 10 + }, + { + "job_id": "batch-789xyz", + "title": "Carbon beam spread", + "start_time": "2024-01-14T14:00:00Z", + "end_time": "2024-01-14T15:30:00Z", + "job_state": "COMPLETED", + "sim_type": "shieldhit", + "input_type": "editor", + "platform": "batch", + "ntasks": 100 + } + ], + "page_count": 5, + "simulations_count": 47 +} +``` + +**Filtering by state:** + +```http +GET /user/simulations?job_state=COMPLETED,FAILED +``` + +Returns only simulations in the specified states. + +--- + +## Delete Simulation + +Remove a simulation and all its associated data (results, logfiles, inputs) from the database. The simulation must be in a terminal state (`COMPLETED`, `FAILED`, or `CANCELED`). + +```http +DELETE /user/simulations?job_id=abc123-def456 +Cookie: access_token= +``` + +**Response** `200 OK` + +```json +{ + "message": "Simulation deleted" +} +``` + +**Errors:** +- `400` — Missing `job_id` +- `403` — Simulation belongs to another user +- `404` — Simulation not found +- `409` — Simulation is still running (cannot delete) + +--- + +## Update User + +Update the authenticated user's profile information. + +```http +POST /user/update +Cookie: access_token= +Content-Type: application/json + +{ + "field": "value" +} +``` + +**Response** `200 OK` + +```json +{ + "message": "User updated" +} +``` + +--- + +## List Clusters + +List available HPC clusters for batch job submission. **Requires Keycloak authentication** (PLGrid access). + +```http +GET /clusters +Cookie: access_token= +``` + +**Response** `200 OK` + +```json +{ + "message": "Available clusters", + "clusters": [ + { + "cluster_name": "prometheus", + "display_name": "Prometheus (Cyfronet)", + "available": true + } + ] +} +``` + +**Errors:** +- `403` — Not a Keycloak-authenticated user (local users cannot access HPC clusters) diff --git a/src/content/docs/architecture/auth-model.md b/src/content/docs/architecture/auth-model.md new file mode 100644 index 0000000..acb4f53 --- /dev/null +++ b/src/content/docs/architecture/auth-model.md @@ -0,0 +1,171 @@ +--- +title: Authentication Model +description: How authentication and authorization work across the YAPTIDE system. +--- + +YAPTIDE supports two authentication methods: **native Yaptide auth** (username/password) and **Keycloak SSO** (PLGrid federation). Both issue JWT tokens stored in httpOnly cookies. 
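+To make the token mechanics concrete, the sketch below mints and verifies a short-lived JWT with the PyJWT library. It is illustrative only: the real claim names, secret handling, and signing configuration live in the backend code (the 10-minute lifetime matches the token table further down):
+
+```python
+import datetime
+
+import jwt  # PyJWT
+
+SECRET = "change-me"  # illustrative; the backend manages its own secret
+
+def issue_access_token(user_id: int, lifetime_minutes: int = 10) -> str:
+    """Mint a short-lived access token (claim names are illustrative)."""
+    now = datetime.datetime.now(datetime.timezone.utc)
+    claims = {"sub": str(user_id),
+              "iat": now,
+              "exp": now + datetime.timedelta(minutes=lifetime_minutes)}
+    return jwt.encode(claims, SECRET, algorithm="HS256")
+
+def verify_access_token(token: str) -> dict:
+    """Raises jwt.ExpiredSignatureError once the token is past its exp claim."""
+    return jwt.decode(token, SECRET, algorithms=["HS256"])
+```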
+ +## Authentication Methods + +| Method | When Used | Users | +|---|---|---| +| **Yaptide Native** | Development, standalone deployments | Any registered user | +| **Keycloak SSO** | Production, PLGrid-integrated deployments | PLGrid-federated users | + +## Native Authentication Flow + +Simple username/password registration and login. Passwords are hashed with Werkzeug's security module. + +``` +┌──────────┐ ┌─────────┐ +│ UI │ │ Backend │ +└────┬─────┘ └────┬────┘ + │ │ + │ PUT /auth/register │ + │ { username, password } │ + │─────────────────────────────────────>│ + │ │ Hash password + │ │ Create YaptideUserModel + │ 201 Created │ + │<─────────────────────────────────────│ + │ │ + │ POST /auth/login │ + │ { username, password } │ + │─────────────────────────────────────>│ + │ │ Verify password hash + │ │ Generate JWT access + refresh tokens + │ 200 OK │ + │ Set-Cookie: access_token (httpOnly) │ + │ Set-Cookie: refresh_token (httpOnly)│ + │ Body: { access_exp } │ + │<─────────────────────────────────────│ + │ │ + │ GET /auth/refresh │ + │ Cookie: refresh_token │ + │─────────────────────────────────────>│ + │ │ Validate refresh token + │ │ Generate new access token + │ 200 OK │ + │ Set-Cookie: access_token (httpOnly) │ + │<─────────────────────────────────────│ +``` + +### Token Lifecycle + +| Token | Lifetime | Storage | +|---|---|---| +| Access token | 10 minutes | httpOnly cookie | +| Refresh token | 120 minutes | httpOnly cookie | +| Simulation update key | 7 days | Backend internal | + +The UI auto-refreshes the access token at **1/3 of its lifetime** (approximately every 3 minutes) by hitting `GET /auth/refresh`. + +## Keycloak SSO Flow + +Used for PLGrid-integrated deployments. The UI manages the Keycloak session, then exchanges the Keycloak token with the backend for a local JWT. + +``` +┌──────────┐ ┌───────────┐ ┌──────────┐ +│ UI │ │ Keycloak │ │ Backend │ +└────┬─────┘ └─────┬─────┘ └────┬─────┘ + │ │ │ + │ OIDC login (PKCE S256) │ + │ Redirect to Keycloak │ + │───────────────────>│ │ + │ │ │ + │ User authenticates│ │ + │ (PLGrid credentials) │ + │<───────────────────│ │ + │ Keycloak tokens │ │ + │ (access + refresh)│ │ + │ │ │ + │ POST /auth/keycloak │ + │ Authorization: Bearer │ + │───────────────────────────────────────>│ + │ │ + │ Validate token against │ + │ Keycloak JWKS endpoint │ + │ Check PLG_YAPTIDE_ACCESS │ + │ Fetch SSH certificates │ + │ Create/update KeycloakUser │ + │ Generate local JWT │ + │ │ + │ 200 OK │ + │ Set-Cookie: access_token (httpOnly) │ + │ Set-Cookie: refresh_token (httpOnly) │ + │<───────────────────────────────────────│ +``` + +### Keycloak Configuration + +The UI uses `keycloak-js` SDK with these settings: + +| Setting | Value | +|---|---| +| Flow | Standard (Authorization Code) | +| PKCE challenge | S256 | +| Silent SSO check | Enabled (`silentCheckSsoRedirectUri`) | +| Token refresh | Auto-refresh when <5 min remaining | + +Required environment variables: + +```bash +REACT_APP_KEYCLOAK_BASE_URL=https://keycloak.example.com +REACT_APP_KEYCLOAK_REALM=yaptide +REACT_APP_KEYCLOAK_CLIENT_ID=my-client +REACT_APP_ALT_AUTH=plg +``` + +### PLGrid Service Verification + +When a Keycloak token arrives, the backend checks the `PLG_YAPTIDE_ACCESS` claim in the token. This ensures the user has been granted access to the YAPTIDE service in the PLGrid infrastructure. + +The backend also: +1. Fetches **SSH certificates** from a dedicated cert-auth service (`CERT_AUTH_URL`) +2. Stores the certificate and private key in `KeycloakUserModel` +3. 
Uses these credentials for SSH connections to HPC clusters when submitting batch jobs + +## Demo Mode + +When `REACT_APP_TARGET=demo`, authentication is bypassed entirely and only in-browser Geant4 simulations are available. See [Frontend Demo — Local](/for_developers/local-setup/local-frontend-demo/) for setup instructions. + +## Backend Authorization + +### The `@requires_auth` Decorator + +All protected endpoints use the `@requires_auth()` decorator, which: + +1. Extracts the JWT access token from the `access_token` cookie +2. Decodes and validates the token (signature, expiry) +3. Loads the `UserModel` from the database +4. Injects the `user` object into the Flask request context + +```python +@requires_auth() +def post(self, user: UserModel): + # user is automatically injected + simulation = SimulationModel(user_id=user.id, ...) +``` + +### User Model Hierarchy + +The user model uses **SQLAlchemy polymorphic inheritance** on the `auth_provider` discriminator: + +``` +UserModel (base) +├── YaptideUserModel (auth_provider="yaptide") +│ └── password_hash +└── KeycloakUserModel (auth_provider="keycloak") + ├── cert (SSH certificate) + └── private_key (SSH private key) +``` + +This allows the backend to transparently handle both auth methods while storing auth-specific fields only where needed. + +## Security Notes + +- All tokens are stored in **httpOnly cookies** — not accessible to JavaScript (`document.cookie`) +- CORS is configurable via `FLASK_USE_CORS` (enabled for local dev with `localhost:3000`) +- Nginx terminates TLS (self-signed cert for development, real cert for production) +- Passwords are hashed with Werkzeug's `generate_password_hash` (PBKDF2) +- Keycloak tokens are validated against the **JWKS endpoint** (asymmetric signature verification) diff --git a/src/content/docs/architecture/data-flow.md b/src/content/docs/architecture/data-flow.md new file mode 100644 index 0000000..b3f74b4 --- /dev/null +++ b/src/content/docs/architecture/data-flow.md @@ -0,0 +1,150 @@ +--- +title: Data Flow +description: End-to-end flow of a simulation through the YAPTIDE system. +--- + +This page traces the lifecycle of a simulation from geometry creation to result visualization, covering all three execution paths. + +## High-Level Flow + +``` +User creates geometry → Editor JSON → Converter → Simulator input files + │ + ┌───────────────────────────────────┘ + │ + ┌────────────┼────────────┐ + ▼ ▼ ▼ + Direct Batch Local + (Celery) (Slurm/SSH) (Geant4 Wasm) + │ │ │ + ▼ ▼ ▼ + Simulator runs on their respective platforms + │ │ │ + └────────────┼────────────┘ + │ + ▼ + Results stored/returned + │ + ▼ + UI renders plots (JSRoot) +``` + +## Step 1: Scene Construction + +The user works in the **3D Editor** (Three.js viewport) to build a simulation scene: + +1. **Add figures** — boxes, cylinders, spheres with position, rotation, and dimensions. +2. **Define zones** — boolean operations (union, intersection, subtraction) on figures. Each zone is assigned a material. +3. **Configure beam** — particle type, energy, position, direction, sigma, divergence. +4. **Set up detectors** — scoring geometries (cylindrical, mesh, zone-based, or global). +5. **Define outputs** — which quantities to score (dose, fluence, LET), with optional particle filters. +6. **Physics settings** — energy loss model, nuclear reactions, multiple scattering. + +The editor continuously serializes this scene into the **editor JSON** format. This JSON is auto-saved to `localStorage`. 
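+For a concrete sense of what gets serialized, the sketch below checks an exported project file for the expected top-level sections (the key names follow the [Project JSON Schema](/for_developers/architecture/project-json-schema/); the file path is a placeholder):
+
+```python
+import json
+from pathlib import Path
+
+EXPECTED_KEYS = {"project", "beam", "figureManager", "zoneManager",
+                 "materialManager", "detectorManager", "scoringManager", "physic"}
+
+def check_project(path: Path) -> None:
+    """Fail loudly if an exported editor JSON is missing a section."""
+    data = json.loads(path.read_text())
+    missing = EXPECTED_KEYS - data.keys()
+    if missing:
+        raise ValueError(f"project file is missing sections: {sorted(missing)}")
+
+check_project(Path("my_project.json"))  # hypothetical exported project
+```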
+ +## Step 2: Format Conversion + +Before a simulation can run, the editor JSON must be converted to the target simulator's native input format. + +**Server-side** (Direct and Batch paths): +```python +from converter.api import get_parser_from_str, run_parser + +parser = get_parser_from_str("shieldhit") # or "fluka", "geant4", "topas" +files_dict = run_parser(parser, json_data, output_dir=Path("./out")) +# files_dict = {"beam.dat": "...", "mat.dat": "...", "geo.dat": "...", "detect.dat": "..."} +``` + +**Client-side** (input file preview and Geant4 Wasm path): +The same converter runs in the browser via **Pyodide** (Python compiled to WebAssembly). The Pyodide Web Worker calls `converter.api.run_parser` and returns the generated files to the UI thread via `comlink`. + +## Step 3a: Direct Execution (Celery) + +Used for SHIELD-HIT12A and FLUKA when running on the backend server. + +``` +POST /jobs/direct + body: { sim_data, ntasks, sim_type: "shieldhit" } +``` + +1. Flask creates a `CelerySimulationModel` row and N `CeleryTaskModel` rows. +2. A **Celery chord** is dispatched: N `run_single_simulation` tasks execute in parallel. +3. Each task: + - Writes input files to a temp directory + - Spawns the simulator binary as a subprocess + - Monitors progress via a background thread reading stdout/logfiles + - POSTs status updates to `POST /tasks` (primaries completed, estimated time) +4. When all N tasks complete, a **merge task** (`get_job_results`) runs: + - Averages estimator results across all tasks + - Compresses and stores results in the database (`EstimatorModel` → `PageModel`) +5. Job state transitions: `PENDING → RUNNING → MERGING_QUEUED → MERGING_RUNNING → COMPLETED` + +**Polling**: The UI polls `GET /jobs/direct?job_id=` at intervals to check status and fetch partial progress. + +## Step 3b: Batch Execution (Slurm via SSH) + +Used for SHIELD-HIT12A and FLUKA on HPC clusters (e.g., PLGrid). + +``` +POST /jobs/batch + body: { sim_data, ntasks, sim_type: "shieldhit", batch_options: { cluster_name: "ares" } } +``` + +1. Flask creates a `BatchSimulationModel` row. +2. The **helper worker** picks up a `submit_job` Celery task and: + - Connects to the cluster via SSH using the user's PLGrid certificate (stored in `KeycloakUserModel`) + - Uploads input files as a compressed archive + - Uploads watcher and data-sender scripts + - Submits a **Slurm array job** (`sbatch`) + a collect job +3. The cluster's **watcher script** monitors each array task and POSTs progress back to the YAPTIDE backend. +4. When all array tasks finish, the **collect job** gathers results and POSTs them to `POST /results`. + +**Polling**: The UI polls `GET /jobs/batch?job_id=`. The backend also checks cluster status via `sacct` over SSH. + +## Step 3c: Local Execution (Geant4 Wasm) + +Used for Geant4 simulations. Runs entirely in the browser — no backend required. + +1. The Pyodide converter generates `geometry.gdml` and `run.mac`. +2. These files are passed to the **Geant4 Wasm Worker**. +3. Geant4 executes the simulation in WebAssembly. +4. Output files are parsed by `Geant4ResultsFileParser`. +5. Results are rendered directly — no server round-trip. + +:::note + This path works in **demo mode** (`REACT_APP_TARGET=demo`), making it possible to run Geant4 simulations without any backend or authentication. 
+::: + +## Step 4: Result Visualization + +Regardless of the execution path, results follow the same structure: + +- **Estimators** — named result containers (e.g., `detector_dose`, `mesh_fluence`) +- **Pages** — individual scoring dimensions within an estimator (e.g., energy bins, spatial slices) + +Each page contains: +- `page_name` — descriptive name +- `page_dimension` — number of dimensions +- `compressed_data` — gzip-compressed JSON with axes, values, and metadata + +The UI renders results using **JSRoot** (CERN ROOT's JavaScript library): +- 1D histograms (e.g., Bragg peak depth-dose curves) +- 2D color maps (e.g., spatial dose distributions) +- Interactive zoom, pan, and cursor readout +- CSV export + +## Data Persistence + +All simulation data is stored in PostgreSQL with gzip compression. + +| Data | Storage | +|---|---| +| Input files (JSON or raw) | `InputModel.compressed_data` | +| Estimator metadata | `EstimatorModel` (name, filename) | +| Result pages | `PageModel.compressed_data` | +| Simulation logs | `LogfilesModel.compressed_data` | +| Job state and metadata | `SimulationModel` + `TaskModel` | + +:::note + The model names above refer to SQLAlchemy models defined in [`yaptide/persistence/models.py`](https://github.com/yaptide/yaptide/blob/master/yaptide/persistence/models.py). +::: diff --git a/src/content/docs/architecture/overview.md b/src/content/docs/architecture/overview.md new file mode 100644 index 0000000..7904680 --- /dev/null +++ b/src/content/docs/architecture/overview.md @@ -0,0 +1,137 @@ +--- +title: System Overview +description: High-level architecture of the YAPTIDE platform. +--- + +YAPTIDE is composed of **three repositories** that work together to provide a complete simulation workflow — from geometry creation to result visualization. + +## Architecture Diagram + +``` +┌─────────────────────────────────────────────────────────┐ +│ Browser │ +│ │ +│ ┌──────────────────────────────────────────────────┐ │ +│ │ React + Three.js UI │ │ +│ │ │ │ +│ │ ┌──────────────┐ ┌──────────────────────────┐ │ │ +│ │ │ 3D Editor │ │ Pyodide Web Worker │ │ │ +│ │ │ (Three.js) │ │ (Python Converter) │ │ │ +│ │ └──────────────┘ └──────────────────────────┘ │ │ +│ │ ┌─────────────────────────┐ │ │ +│ │ │ Geant4 Wasm Worker │ │ │ +│ │ │ (local simulations) │ │ │ +│ │ └─────────────────────────┘ │ │ +│ └──────────────────────┬───────────────────────────┘ │ +└─────────────────────────┼───────────────────────────────┘ + │ HTTPS (REST API) +┌─────────────────────────┼───────────────────────────────┐ +│ Nginx │ :8443 │ +│ ┌───────┴───────┐ │ +│ │ Flask API │ :6000 │ +│ └───┬───────┬──┘ │ +│ │ │ │ +│ ┌──────────┤ ├──────────┐ │ +│ ▼ ▼ ▼ ▼ │ +│ ┌──────────┐ ┌───────┐ ┌─────────────────────┐ │ +│ │ Redis │ │ PgSQL │ │ Celery Workers │ │ +│ │ (broker) │ │ (DB) │ │ │ │ +│ └──────────┘ └───────┘ │ simulation_worker │ │ +│ │ helper_worker │ │ +│ └────────┬────────────┘ │ +│ │ │ +│ ┌────────┴────────────┐ │ +│ │ Simulator Binaries │ │ +│ │ SHIELD-HIT12A │ │ +│ │ FLUKA │ │ +│ │ TOPAS │ │ +│ └─────────────────────┘ │ +│ │ +│ Backend Server │ +└─────────────────────────────────────────────────────────┘ + │ + │ SSH (Fabric) + ▼ + ┌─────────────────┐ + │ HPC Cluster │ + │ (PLGrid/Slurm) │ + └─────────────────┘ +``` + +## Component Responsibilities + +### Backend (`yaptide` repository) + +The backend is a **Flask 3.1** API server with **Celery 5.5** for async task execution and **PostgreSQL 16** for persistence. 
+ +- **REST API** — Handles authentication, job submission, status polling, and result retrieval. +- **Simulation Worker** — Celery worker that runs simulator binaries (SHIELD-HIT12A, FLUKA) in temporary directories, monitors progress, and posts updates back to Flask. +- **Helper Worker** — Celery worker for HPC batch job submission via SSH (PLGrid/Slurm) and cleanup tasks. +- **Nginx** — Reverse proxy with TLS termination. + +### UI (`ui` repository) + +The frontend is a **React 19** single-page application built with **Three.js** for 3D visualization and **MUI** for the interface. It runs entirely in the browser. + +- **3D Editor** — CSG-based geometry builder with a 4-way split viewport (XY, XZ, YZ, perspective). Users place figures (boxes, cylinders, spheres), define boolean zones, assign materials, and configure detectors. +- **Pyodide Converter** — The `yaptide-converter` Python package compiled to WebAssembly via Pyodide, running in a Web Worker. Converts editor JSON to simulator input files without a server round-trip. +- **Geant4 Wasm Worker** — A full Geant4 runtime compiled to WebAssembly. Enables running Geant4 simulations entirely in the browser. +- **Result Visualization** — JSRoot (CERN ROOT) renders interactive histograms and 2D profiles. + +### Converter (`converter` repository) + +A standalone **Python package** that translates the editor's JSON project format into native simulator input files. + +- **Input**: JSON from the 3D editor (geometry, beam, materials, scoring, physics) +- **Output**: Engine-specific files (`beam.dat`, `.inp`, `.gdml`, `.mac`) +- **Used in two contexts**: imported by the Flask backend for server-side conversion, and compiled to Wasm for in-browser use via Pyodide. + +## Execution Paths + +YAPTIDE supports three ways to run a simulation: + +| Path | Engine | Where it Runs | How it Works | +|---|---|---|---| +| **Direct (Celery)** | SHIELD-HIT12A, FLUKA | Backend server | Celery chord: N parallel tasks → merge results | +| **Batch (Slurm)** | SHIELD-HIT12A, FLUKA | HPC cluster (PLGrid) | SSH → Slurm array job → watcher script → callbacks | +| **Local (Wasm)** | Geant4 | User's browser | Geant4 compiled to WebAssembly, no server needed | + +:::tip + Want to understand how data moves through these paths end-to-end? See [Data Flow](/for_developers/architecture/data-flow/). +::: + +## The JSON Contract + +The **editor JSON** is the canonical data format that ties all three repositories together. The UI produces it, the converter consumes it, and the backend stores it. + +Top-level keys: +- `project` — title and metadata +- `beam` — particle type, energy, position, direction +- `figureManager.figures[]` — 3D solid primitives +- `zoneManager.zones[]` — boolean CSG operations on figures +- `materialManager.materials[]` — material definitions (ICRU) +- `detectorManager.detectors[]` — scoring detector geometries +- `scoringManager` — outputs, quantities, filters +- `physic` — physics model configuration + +:::tip +See [Project JSON Schema](/for_developers/architecture/project-json-schema/) for the full specification. 
+::: + +## Technology Stack + +| Layer | Technology | +|---|---| +| Frontend framework | React, TypeScript | +| 3D rendering | Three.js | +| UI components | MUI | +| Backend framework | Flask | +| Task queue | Celery, Redis | +| Database | PostgreSQL, SQLAlchemy | +| Auth | JWT, Keycloak OIDC | +| Converter | Python | +| In-browser Python | Pyodide | +| In-browser simulation | Geant4 WebAssembly | +| Result rendering | JSRoot | +| Containerization | Docker Compose | +| Reverse proxy | Nginx | diff --git a/src/content/docs/architecture/project-json-schema.md b/src/content/docs/architecture/project-json-schema.md new file mode 100644 index 0000000..199724b --- /dev/null +++ b/src/content/docs/architecture/project-json-schema.md @@ -0,0 +1,249 @@ +--- +title: Project JSON Schema +description: The canonical JSON format produced by the 3D editor and consumed by the converter. +--- + +The **editor JSON** is the central data format in YAPTIDE. The UI produces it, the converter consumes it, and the backend stores it. Understanding this schema is essential for working on any part of the system. + +## Top-Level Structure + +```json +{ + "project": { ... }, + "beam": { ... }, + "figureManager": { "figures": [ ... ] }, + "zoneManager": { "zones": [ ... ], "worldZone": { ... } }, + "materialManager": { "materials": [ ... ] }, + "detectorManager": { "detectors": [ ... ] }, + "scoringManager": { "outputs": [ ... ], "filters": [ ... ] }, + "physic": { ... }, + "specialComponentsManager": { ... } +} +``` + +## `project` + +Metadata about the simulation. + +```json +{ + "project": { + "title": "My Simulation" + } +} +``` + +## `beam` + +Particle source configuration. + +| Field | Type | Description | +|---|---|---| +| `id` | `number` | Particle type ID (see SHIELD-HIT12A particle table) | +| `energy` | `number` | Kinetic energy | +| `energyUnit` | `string` | `"MeV"` or `"MeV/nucl"` | +| `energySpread` | `number` | Energy spread (standard deviation) | +| `numberOfParticles` | `number` | Total primaries to simulate | +| `position` | `{x, y, z}` | Source position in cm | +| `direction` | `{x, y, z}` | Beam direction unit vector | +| `sigma` | `{x, y}` | Gaussian beam profile sigma | +| `sad` | `{x, y}` | Source-to-axis distance (for divergent beams) | +| `sourceFile` | `object \| null` | External source definition file | + +```json +{ + "beam": { + "id": 2, + "energy": 150.0, + "energyUnit": "MeV", + "energySpread": 1.5, + "numberOfParticles": 10000, + "position": { "x": 0, "y": 0, "z": 0 }, + "direction": { "x": 0, "y": 0, "z": 1 }, + "sigma": { "x": 0.1, "y": 0.1 }, + "sad": { "x": 0, "y": 0 } + } +} +``` + +## `figureManager.figures[]` + +3D geometric primitives. Each figure represents a solid shape in the scene. 
+ +| Field | Type | Description | +|---|---|---| +| `type` | `string` | `"BoxFigure"`, `"CylinderFigure"`, or `"SphereFigure"` | +| `name` | `string` | Display name | +| `uuid` | `string` | Unique identifier | +| `position` | `{x, y, z}` | Center position in cm | +| `rotation` | `{x, y, z}` | Tait-Bryan rotation angles in radians | +| `parameters` | `object` | Shape-specific dimensions | + +**Box parameters**: `{ xLength, yLength, zLength }` (half-lengths in cm) + +**Cylinder parameters**: `{ radius, height }` (in cm, along Z-axis) + +**Sphere parameters**: `{ radius }` (in cm) + +```json +{ + "type": "BoxFigure", + "name": "WaterPhantom", + "uuid": "abc-123", + "position": { "x": 0, "y": 0, "z": 15 }, + "rotation": { "x": 0, "y": 0, "z": 0 }, + "parameters": { "xLength": 10, "yLength": 10, "zLength": 20 } +} +``` + +## `zoneManager.zones[]` + +CSG (Constructive Solid Geometry) zones define regions of space by combining figures through boolean operations. Each zone is assigned a material. + +| Field | Type | Description | +|---|---|---| +| `uuid` | `string` | Unique identifier | +| `name` | `string` | Display name | +| `materialUuid` | `string` | Reference to a material in `materialManager` | +| `unionOperations` | `array` | List of figure UUIDs included (union) | +| `intersectionOperations` | `array` | List of figure UUIDs used for intersection | +| `subtractionOperations` | `array` | List of figure UUIDs subtracted | + +### `zoneManager.worldZone` + +The bounding world volume. Everything outside is treated as vacuum or a "black hole" (absorbing boundary). + +```json +{ + "worldZone": { + "uuid": "world-001", + "figure": { + "type": "BoxFigure", + "parameters": { "xLength": 50, "yLength": 50, "zLength": 50 } + }, + "materialUuid": "vacuum-uuid" + } +} +``` + +## `materialManager.materials[]` + +Material definitions based on ICRU identifiers. + +| Field | Type | Description | +|---|---|---| +| `uuid` | `string` | Unique identifier | +| `name` | `string` | Display name (e.g., "Water", "Air", "Bone") | +| `icru` | `number` | ICRU material number | +| `density` | `number` | Override density (optional) | + +```json +{ + "uuid": "mat-001", + "name": "Water", + "icru": 276, + "density": 1.0 +} +``` + +## `detectorManager.detectors[]` + +Scoring detector geometries. Detectors define *where* quantities are scored. + +| Field | Type | Description | +|---|---|---| +| `uuid` | `string` | Unique identifier | +| `type` | `string` | `"Cylinder"`, `"Mesh"`, `"Zone"`, or `"All"` | +| `name` | `string` | Display name | +| `geometryData` | `object` | Type-specific geometry definition | + +**Cylinder detector** (cylindrical scoring mesh): +```json +{ + "type": "Cylinder", + "geometryData": { + "position": { "x": 0, "y": 0, "z": 15 }, + "radius": { "min": 0, "max": 5, "bins": 1 }, + "zAxis": { "min": 0, "max": 30, "bins": 300 } + } +} +``` + +**Mesh detector** (rectangular scoring grid): +```json +{ + "type": "Mesh", + "geometryData": { + "position": { "x": 0, "y": 0, "z": 15 }, + "xAxis": { "min": -5, "max": 5, "bins": 100 }, + "yAxis": { "min": -5, "max": 5, "bins": 100 }, + "zAxis": { "min": 0, "max": 30, "bins": 1 } + } +} +``` + +## `scoringManager` + +Defines what quantities are scored and how results are filtered. + +### `scoringManager.outputs[]` + +Each output links a detector to a set of quantities. 
+ +| Field | Type | Description | +|---|---|---| +| `name` | `string` | Output filename | +| `detectorUuid` | `string` | Reference to a detector | +| `quantities` | `array` | List of scored quantities | + +### `scoringManager.outputs[].quantities[]` + +| Field | Type | Description | +|---|---|---| +| `type` | `string` | Quantity keyword (e.g., `"Dose"`, `"Fluence"`, `"LET"`, `"dLET"`) | +| `filterUuid` | `string \| null` | Optional reference to a particle filter | +| `rescale` | `number` | Rescaling factor | + +### `scoringManager.filters[]` + +Particle filters for conditional scoring. + +| Field | Type | Description | +|---|---|---| +| `uuid` | `string` | Unique identifier | +| `name` | `string` | Display name | +| `particleId` | `number` | Particle type ID to filter for | +| `rules` | `array` | Filter rules (e.g., energy range, angular cuts) | + +## `physic` + +Physics model configuration. + +| Field | Type | Description | +|---|---|---| +| `energyLoss` | `number` | Energy loss model selection | +| `nuclearReactions` | `number` | Nuclear reactions toggle (-1 = off, 1 = on) | +| `straggling` | `number` | Straggling model | +| `multipleScattering` | `number` | Multiple scattering model | +| `stoppingPowerFile` | `object \| null` | Custom stopping power file | +| `stepLength` | `number` | Maximum step length (cm) | + +## `specialComponentsManager` + +Optional special components such as beam modulators (SOBP wheel definitions) and CT data cubes. + +```json +{ + "specialComponentsManager": { + "modulatorConfig": { + "enabled": true, + "fileName": "modulator.dat", + "zones": [ ... ] + } + } +} +``` + +## Version History + +The editor JSON format is versioned. The `metadata.version` field tracks the schema version. The UI handles backward compatibility for older project files by migrating them on load. diff --git a/src/content/docs/backend/api-endpoints.md b/src/content/docs/backend/api-endpoints.md new file mode 100644 index 0000000..efe1359 --- /dev/null +++ b/src/content/docs/backend/api-endpoints.md @@ -0,0 +1,239 @@ +--- +title: API Endpoints +description: Guide to all YAPTIDE REST API endpoints. +--- + +All endpoints are registered in `routes/main_routes.py`. This page explains when and why each endpoint is called, grouped by domain. For the raw OpenAPI specification, see the [API Reference](/for_developers/api-reference/overview/). + +## Authentication + +### `PUT /auth/register` + +**Create a new Yaptide-native user account.** + +```json +// Request body +{ "username": "alice", "password": "secure123" } +``` + +Returns `201 Created` on success. The password is hashed with Werkzeug's PBKDF2-based hasher. + +### `POST /auth/login` + +**Log in with username and password.** + +```json +// Request body +{ "username": "alice", "password": "secure123" } +``` + +On success, sets two **httpOnly cookies** (`access_token`, `refresh_token`) and returns: +```json +{ "access_exp": 1709164800 } +``` + +The `access_exp` timestamp tells the frontend when to auto-refresh. + +### `GET /auth/refresh` + +**Refresh the access token.** Requires a valid `refresh_token` cookie. + +Returns a new `access_token` cookie and updated `access_exp`. + +### `GET /auth/status` + +**Get the current user's info.** Requires authentication. + +```json +{ "username": "alice" } +``` + +### `DELETE /auth/logout` + +**Log out.** Clears both auth cookies. + +### `POST /auth/keycloak` + +**Exchange a Keycloak bearer token for a local JWT session.** + +The frontend sends the Keycloak access token in the `Authorization` header. 
The backend: +1. Validates the token against the Keycloak JWKS endpoint +2. Checks the `PLG_YAPTIDE_ACCESS` claim +3. Fetches SSH certificates from the cert-auth service +4. Creates or updates a `KeycloakUserModel` +5. Issues local JWT cookies + +### `DELETE /auth/keycloak` + +**Log out a Keycloak user.** Clears auth cookies. + +## Jobs + +### `POST /jobs/direct` + +**Submit a simulation to run on the backend server (Celery).** + +```json +// Request body +{ + "sim_data": { ... }, // Editor JSON or raw input files + "ntasks": 4, // Number of parallel tasks + "sim_type": "shieldhit", // "shieldhit", "fluka", "topas" + "input_type": "editor" // "editor" (JSON) or "files" (raw input) +} +``` + +The backend: +1. Creates `CelerySimulationModel` + N `CeleryTaskModel` rows +2. Converts input (if `input_type: "editor"`) using the converter +3. Dispatches a Celery chord: N `run_single_simulation` tasks → 1 merge task + +Returns: +```json +{ "job_id": "abc-123-def" } +``` + +### `GET /jobs/direct` + +**Check the status of a direct simulation.** + +``` +GET /jobs/direct?job_id=abc-123-def +``` + +Returns job state, task progress, and metadata: +```json +{ + "job_state": "RUNNING", + "job_tasks_status": [ + { + "task_id": 1, + "task_state": "RUNNING", + "simulated_primaries": 5000, + "requested_primaries": 10000, + "estimated_time": 45 + } + ] +} +``` + +### `DELETE /jobs/direct` + +**Cancel a running direct simulation.** + +``` +DELETE /jobs/direct?job_id=abc-123-def +``` + +Revokes all Celery tasks and sets the job state to `CANCELED`. + +### `POST /jobs/batch` + +**Submit a simulation to an HPC cluster (Slurm via SSH).** + +```json +{ + "sim_data": { ... }, + "ntasks": 100, + "sim_type": "shieldhit", + "batch_options": { + "cluster_name": "ares", + "array_options": "--time=01:00:00", + "collect_options": "--time=00:30:00" + } +} +``` + +Requires Keycloak authentication (PLGrid SSH credentials stored in the user model). + +### `GET /jobs/batch` + +**Check the status of a batch simulation.** Queries the cluster via `sacct` over SSH. + +### `DELETE /jobs/batch` + +**Cancel a batch simulation.** Runs `scancel` on the cluster. + +## Results + +### `GET /results` + +**Retrieve simulation results (estimators + pages).** + +``` +GET /results?job_id=abc-123-def +``` + +Returns paginated estimator data with compressed page contents. + +> The UI fetches results only after the job reaches `COMPLETED` state. + +### `POST /results` + +**Store simulation results.** Called internally by the merge task or batch collect job. Not intended for external use. + +### `GET /estimators` + +**List estimator metadata for a job.** + +``` +GET /estimators?job_id=abc-123-def +``` + +Returns estimator names, filenames, and page counts — without the full data. + +### `GET /inputs` + +**Retrieve the simulation input files.** + +``` +GET /inputs?job_id=abc-123-def +``` + +Returns the converter-generated input files (or raw uploaded files) as a JSON dict. + +### `GET /logfiles` + +**Retrieve simulation log files.** + +``` +GET /logfiles?job_id=abc-123-def +``` + +## User Management + +### `GET /user/simulations` + +**List the current user's simulations.** Supports pagination. 
+ +``` +GET /user/simulations?page_size=10&page_idx=0&order_by=start_time&order_type=desc +``` + +### `DELETE /user/simulations` + +**Delete a simulation and all associated data.** + +``` +DELETE /user/simulations?job_id=abc-123-def +``` + +## Clusters + +### `GET /clusters` + +**List available HPC clusters.** + +Returns cluster names and metadata for batch job submission. + +## Internal Endpoints + +### `POST /jobs` + +**Used by workers to update job-level state.** Not called by the frontend. + +### `POST /tasks` + +**Used by simulation workers to report task progress.** Carries: task state, simulated primaries, estimated time remaining. + +These internal endpoints use a **simulation update key** (7-day JWT) for authentication, not the user's session token. diff --git a/src/content/docs/backend/database.md b/src/content/docs/backend/database.md new file mode 100644 index 0000000..cf03681 --- /dev/null +++ b/src/content/docs/backend/database.md @@ -0,0 +1,241 @@ +--- +title: Database +description: Data model, schema, and migration workflow for the YAPTIDE backend. +--- + +The backend uses **PostgreSQL** via **SQLAlchemy** with **Flask-Migrate** (Alembic) for schema management. In tests, an in-memory SQLite database is used. + +## Data Model + +The database contains **12 tables** organized around users, simulations, tasks, and results. + +### Entity Relationships + +``` +UserModel (polymorphic) +├── YaptideUserModel +└── KeycloakUserModel + +UserModel ──< SimulationModel (polymorphic) + ├── CelerySimulationModel + └── BatchSimulationModel + +SimulationModel ──< TaskModel (polymorphic) + ├── CeleryTaskModel + └── BatchTaskModel + +SimulationModel ──< InputModel +SimulationModel ──< EstimatorModel ──< PageModel +SimulationModel ──< LogfilesModel + +ClusterModel ──< BatchSimulationModel +``` + +## Tables + +### User Tables + +**`User`** — base user table with polymorphic inheritance on `auth_provider`. + +| Column | Type | Description | +|---|---|---| +| `id` | Integer (PK) | Auto-incremented user ID | +| `username` | String | Unique username | +| `auth_provider` | String | `"yaptide"` or `"keycloak"` (discriminator) | + +**`YaptideUser`** — native auth users. + +| Column | Type | Description | +|---|---|---| +| `id` | Integer (FK → User) | | +| `password_hash` | String | Werkzeug PBKDF2 hash | + +**`KeycloakUser`** — Keycloak/PLGrid users. + +| Column | Type | Description | +|---|---|---| +| `id` | Integer (FK → User) | | +| `cert` | Text | SSH certificate (PEM) | +| `private_key` | Text | SSH private key (PEM) | + +### Simulation Tables + +**`Simulation`** — base simulation table with polymorphic inheritance on `platform`. + +| Column | Type | Description | +|---|---|---| +| `id` | Integer (PK) | Auto-incremented | +| `job_id` | String | UUID, used as the external identifier | +| `user_id` | Integer (FK → User) | Owner | +| `title` | String | Simulation title | +| `platform` | String | `"DIRECT"` or `"BATCH"` (discriminator) | +| `input_type` | String | `"editor"` or `"files"` | +| `sim_type` | String | `"shieldhit"`, `"fluka"`, `"topas"` | +| `job_state` | String | Current state (see lifecycle) | +| `start_time` | DateTime | Job submission time | +| `end_time` | DateTime | Job completion time | +| `update_key_hash` | String | Hashed JWT for worker auth | + +**`CelerySimulation`** — direct (Celery) simulations. 
+
+| Column | Type | Description |
+|---|---|---|
+| `id` | Integer (FK → Simulation) | |
+| `merge_id` | String | Celery task ID for the merge step |
+
+**`BatchSimulation`** — batch (Slurm) simulations.
+
+| Column | Type | Description |
+|---|---|---|
+| `id` | Integer (FK → Simulation) | |
+| `cluster_id` | Integer (FK → Cluster) | Target HPC cluster |
+| `job_dir` | String | Remote working directory on cluster |
+| `array_id` | String | Slurm array job ID |
+| `collect_id` | String | Slurm collect job ID |
+
+### Task Tables
+
+**`Task`** — individual simulation tasks within a job.
+
+| Column | Type | Description |
+|---|---|---|
+| `id` | Integer (PK) | |
+| `task_id` | Integer | 0-based task index |
+| `simulation_id` | Integer (FK → Simulation) | |
+| `task_state` | String | Task state |
+| `requested_primaries` | BigInteger | Target number of primaries |
+| `simulated_primaries` | BigInteger | Completed primaries |
+| `estimated_time` | Integer | Estimated remaining seconds |
+| `start_time` | DateTime | |
+| `end_time` | DateTime | |
+
+**`CeleryTask`** — Celery-specific task data.
+
+| Column | Type | Description |
+|---|---|---|
+| `id` | Integer (FK → Task) | |
+| `celery_id` | String | Celery task UUID |
+
+**`BatchTask`** — batch-specific task data (minimal, inherits from Task).
+
+### Result Tables
+
+**`Input`** — stores simulation input files.
+
+| Column | Type | Description |
+|---|---|---|
+| `id` | Integer (PK) | |
+| `simulation_id` | Integer (FK → Simulation) | |
+| `compressed_data` | LargeBinary | gzip-compressed JSON (file dict) |
+
+**`Estimator`** — named result containers.
+
+| Column | Type | Description |
+|---|---|---|
+| `id` | Integer (PK) | |
+| `simulation_id` | Integer (FK → Simulation) | |
+| `name` | String | Estimator name |
+| `file_name` | String | Original filename |
+| `compressed_data` | LargeBinary | gzip-compressed metadata |
+
+**`Page`** — individual scoring dimensions within an estimator.
+
+| Column | Type | Description |
+|---|---|---|
+| `id` | Integer (PK) | |
+| `estimator_id` | Integer (FK → Estimator) | |
+| `page_number` | Integer | 0-based page index |
+| `page_name` | String | Descriptive name |
+| `page_dimension` | Integer | Number of dimensions |
+| `compressed_data` | LargeBinary | gzip-compressed result data (axes, values) |
+
+**`Logfiles`** — simulation log output.
+
+| Column | Type | Description |
+|---|---|---|
+| `id` | Integer (PK) | |
+| `simulation_id` | Integer (FK → Simulation) | |
+| `compressed_data` | LargeBinary | gzip-compressed log text |
+
+### Cluster Table
+
+**`Cluster`** — registered HPC clusters.
+
+| Column | Type | Description |
+|---|---|---|
+| `id` | Integer (PK) | |
+| `cluster_name` | String | e.g., `"ares"`, `"prometheus"` |
+
+## Data Compression
+
+All large binary fields use gzip compression:
+
+```python
+import json
+
+from yaptide.persistence.models import compress, decompress
+
+# Store
+model.compressed_data = compress(json.dumps(data).encode())
+
+# Retrieve
+data = json.loads(decompress(model.compressed_data).decode())
+```
+
+This reduces storage for large simulation results (estimator pages can be several MB uncompressed).
+
+## Migration Workflow
+
+The project uses **Flask-Migrate** (Alembic) for schema changes.
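+Each migration is a small Python module under `migrations/versions/` with `upgrade()` and `downgrade()` functions. A representative sketch of what autogeneration produces (revision IDs, table, and column names here are purely illustrative):
+
+```python
+"""Add new column
+
+Illustrative sketch of an autogenerated Alembic migration; the real
+scripts live in migrations/versions/ with generated revision IDs.
+"""
+from alembic import op
+import sqlalchemy as sa
+
+revision = "a1b2c3d4e5f6"        # hypothetical revision ID
+down_revision = "f6e5d4c3b2a1"   # hypothetical parent revision
+
+
+def upgrade():
+    # Applied by `flask db upgrade`
+    op.add_column("Simulation", sa.Column("new_column", sa.String(), nullable=True))
+
+
+def downgrade():
+    # Applied by `flask db downgrade`
+    op.drop_column("Simulation", "new_column")
+```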
+ +### Creating a New Migration + +```bash +# After modifying models.py +poetry run flask --app yaptide.application db migrate -m "Add new column" + +# Review the generated migration in migrations/versions/ +# Then apply: +poetry run flask --app yaptide.application db upgrade +``` + +### Development Workflow + +1. Modify `persistence/models.py` +2. Generate migration: `flask db migrate -m "description"` +3. Review the auto-generated migration script +4. Test locally: `flask db upgrade` +5. Commit the migration file alongside the model changes + +### Production Migration + +> Always back up the database before running migrations in production. + +```bash +# Backup +docker compose exec postgresql pg_dump -U yaptide yaptide > backup.sql + +# Apply migration +docker compose exec yaptide_flask flask --app yaptide.application db upgrade +``` + +### Testing Migrations + +To test a migration against a copy of the production database: + +1. Dump production: `pg_dump -U yaptide yaptide > prod_backup.sql` +2. Load into a test database: `psql -U yaptide test_db < prod_backup.sql` +3. Point `FLASK_SQLALCHEMY_DATABASE_URI` at the test DB +4. Run `flask db upgrade` +5. Verify the application works correctly + +## Database Access from Outside Docker + +When running the database in Docker, access it with pgAdmin or `psql`: + +```bash +# Direct psql access +docker compose exec postgresql psql -U yaptide yaptide + +# Or with the develop compose file (includes pgAdmin on port 9999) +docker compose -f docker-compose.yml -f docker-compose-develop.yml up -d +# Open http://localhost:9999, login with admin@admin.com / admin +``` diff --git a/src/content/docs/backend/docker-deployment.md b/src/content/docs/backend/docker-deployment.md new file mode 100644 index 0000000..8ac5634 --- /dev/null +++ b/src/content/docs/backend/docker-deployment.md @@ -0,0 +1,184 @@ +--- +title: Docker Deployment +description: Running the YAPTIDE backend with Docker Compose. +--- + +The backend deployment uses **Docker Compose** with 6 services. This is the recommended way to run the full stack. 
+ +## Services + +```yaml +# Simplified view of docker-compose.yml +services: + redis: # Celery broker + result backend + postgresql: # Primary database + yaptide_flask: # Flask API server + yaptide_simulation_worker: # Celery simulation worker + yaptide_helper_worker: # Celery helper worker + nginx: # Reverse proxy with TLS +``` + +### Service Details + +| Service | Image | Port | Role | +|---|---|---|---| +| `redis` | `redis:8-alpine` | 6379 (internal) | Celery message broker and result backend | +| `postgresql` | `postgres:16-alpine` | 5432 (internal) | Primary database, persisted via named volume | +| `yaptide_flask` | `Dockerfile-flask` | 6000 (internal) | Flask API server | +| `yaptide_simulation_worker` | `Dockerfile-simulation-worker` | — | Runs simulator binaries via Celery | +| `yaptide_helper_worker` | `Dockerfile-helper-worker` | — | Batch job submission and cleanup via Celery | +| `nginx` | `Dockerfile-nginx` | 5000, 8443 | Reverse proxy with TLS termination | + +### Exposed Ports + +| Port | Protocol | Service | +|---|---|---| +| `5000` | HTTP | Nginx → Flask API | +| `8443` | HTTPS | Nginx → Flask API (TLS) | + +## Quick Start + +```bash +cd yaptide +docker compose up --build -d +``` + +Wait for all services to be healthy: + +```bash +docker compose ps +``` + +Create a user: + +```bash +docker compose exec yaptide_flask python -m yaptide.admin.db_manage add-user \ + --username admin --password admin123 +``` + +## Compose Variants + +### Standard (Production-like) + +```bash +docker compose up --build -d +``` + +### Fast Development + +Faster healthcheck intervals for quicker startup feedback: + +```bash +docker compose -f docker-compose.yml -f docker-compose.fast.yml up --build -d +``` + +> Requires Docker Engine v25+ for the fast healthcheck `start_interval` option. 
+ +### Development with pgAdmin + +Adds a pgAdmin instance on port 9999: + +```bash +docker compose -f docker-compose.yml -f docker-compose-develop.yml up --build -d +``` + +Access pgAdmin at **http://localhost:9999** with: +- Email: `admin@admin.com` +- Password: `admin` + +Connect to PostgreSQL using: +- Host: `postgresql` +- Port: `5432` +- Database: `yaptide` +- Username/Password: from `POSTGRES_USER`/`POSTGRES_PASSWORD` env vars + +## Environment Variables + +Set these in a `.env` file in the `yaptide/` root or pass them via Docker: + +### Database + +| Variable | Default | Description | +|---|---|---| +| `POSTGRES_DB` | `yaptide` | Database name | +| `POSTGRES_USER` | `yaptide` | Database username | +| `POSTGRES_PASSWORD` | `yaptide` | Database password | + +### Application + +| Variable | Default | Description | +|---|---|---| +| `FLASK_SQLALCHEMY_DATABASE_URI` | (derived) | Full PostgreSQL connection string | +| `CELERY_BROKER_URL` | `redis://redis:6379/0` | Redis broker URL | +| `CELERY_RESULT_BACKEND` | `redis://redis:6379/0` | Redis result backend | +| `BACKEND_INTERNAL_URL` | `http://yaptide_flask:6000` | Internal URL for worker → Flask | +| `BACKEND_EXTERNAL_URL` | `https://localhost:8443` | Public-facing URL | +| `MAX_CORES` | (all) | CPU limit for simulation worker | +| `LOG_LEVEL_ROOT` | `INFO` | Logging verbosity | + +### Authentication + +| Variable | Description | +|---|---| +| `KEYCLOAK_BASE_URL` | Keycloak server URL | +| `KEYCLOAK_REALM` | Keycloak realm | +| `CERT_AUTH_URL` | PLGrid cert-auth service URL | + +### Simulator Storage (S3) + +| Variable | Description | +|---|---| +| `S3_ENDPOINT` | S3-compatible endpoint | +| `S3_ACCESS_KEY` | S3 access key | +| `S3_SECRET_KEY` | S3 secret key | +| `S3_ENCRYPTION_PASSWORD` | Binary encryption password | +| `S3_ENCRYPTION_SALT` | Binary encryption salt | +| `S3_SHIELDHIT_BUCKET` / `S3_SHIELDHIT_KEY` | SHIELD-HIT12A binary location | +| `S3_FLUKA_BUCKET` / `S3_FLUKA_KEY` | FLUKA binary location | +| `S3_TOPAS_BUCKET` / `S3_TOPAS_KEY` | TOPAS binary location | + +## TLS Configuration + +Nginx is configured with self-signed TLS certificates for development. The `Dockerfile-nginx` generates certificates at build time. + +``` +nginx.conf excerpt: + listen 8443 ssl; + ssl_protocols TLSv1.2 TLSv1.3; + ssl_certificate /etc/nginx/ssl/cert.pem; + ssl_certificate_key /etc/nginx/ssl/key.pem; +``` + +For production, replace the self-signed certificates with real ones: +- Mount certificates via Docker volumes +- Or use a reverse proxy like Traefik or Caddy in front of Nginx + +## Volume Persistence + +The PostgreSQL data is stored in a **named Docker volume** (`postgres_data`). This survives `docker compose down` but is removed with `docker compose down -v`. + +> To completely reset the database: `docker compose down -v && docker compose up --build -d` + +## Container Images on GHCR + +Pre-built images are published to **GitHub Container Registry** (GHCR). 
+ +### Automated Publishing + +- **On push to main**: images are tagged with the Git SHA and `latest` +- **On tag push** (`v*`): images are tagged with the version number + +### Manual Pull + +```bash +docker pull ghcr.io/yaptide/yaptide-flask:latest +docker pull ghcr.io/yaptide/yaptide-simulation-worker:latest +docker pull ghcr.io/yaptide/yaptide-helper-worker:latest +docker pull ghcr.io/yaptide/yaptide-nginx:latest +``` + +### Retention Policy + +- `latest` and versioned tags: kept indefinitely +- SHA-tagged images: cleaned up after 30 days +- Untagged images: cleaned up weekly diff --git a/src/content/docs/backend/overview.md b/src/content/docs/backend/overview.md new file mode 100644 index 0000000..11a6426 --- /dev/null +++ b/src/content/docs/backend/overview.md @@ -0,0 +1,113 @@ +--- +title: Backend Overview +description: Architecture and structure of the YAPTIDE Flask backend. +--- + +The backend is a **Flask** API server that handles authentication, simulation job orchestration, and result persistence. It uses **Celery** for async task execution and **PostgreSQL** for storage. + +## Tech Stack + +| Component | Technology | +|---|---| +| Web framework | Flask + Flask-RESTful | +| Task queue | Celery (Redis broker, eventlet pool) | +| Database | PostgreSQL via SQLAlchemy + Flask-SQLAlchemy | +| Migrations | Flask-Migrate (Alembic) | +| Auth | JWT (PyJWT), Keycloak OIDC | +| Reverse proxy | Nginx (TLS) | +| Packaging | Poetry | + +## Directory Structure + +``` +yaptide/ +├── yaptide/ +│ ├── __init__.py +│ ├── application.py # Flask app factory +│ ├── routes/ +│ │ ├── main_routes.py # Route registration +│ │ ├── auth_routes.py # Native auth (register/login/refresh) +│ │ ├── keycloak_routes.py # Keycloak SSO auth +│ │ ├── simulation_routes.py # Jobs (direct + batch) +│ │ ├── result_routes.py # Results, estimators, logfiles +│ │ ├── user_routes.py # User simulation management +│ │ └── utils/ +│ │ └── decorators.py # @requires_auth decorator +│ ├── persistence/ +│ │ ├── models.py # SQLAlchemy models (12 tables) +│ │ └── db_methods.py # Database access layer +│ ├── celery/ +│ │ ├── simulation_worker.py # Celery app + run_single_simulation task +│ │ └── helper_worker.py # Batch submission + cleanup tasks +│ ├── batch/ +│ │ ├── batch_methods.py # SSH connection + Slurm job management +│ │ └── watcher_scripts/ # Scripts deployed to HPC clusters +│ ├── utils/ +│ │ ├── enums.py # PlatformType, EntityState, InputType +│ │ └── sim_utils.py # Simulation preparation utilities +│ └── admin/ +│ ├── db_manage.py # CLI: user management +│ └── simulators.py # CLI: simulator binary management +├── migrations/ +│ └── versions/ # Alembic migration scripts +├── tests/ +│ ├── conftest.py # Fixtures (Flask app, test client, sample data) +│ ├── integration/ # Full-stack tests +│ └── ... # Unit tests +├── pyproject.toml +├── pytest.ini +└── docker-compose.yml +``` + +## Core Concepts + +### Flask App Factory + +The app is created via `yaptide.application` using the Flask factory pattern. It initializes: +- SQLAlchemy database connection +- Flask-Migrate for schema migrations +- Flask-RESTful for REST endpoint routing +- CORS (when `FLASK_USE_CORS=true`) +- JWT secret key for token signing + +### Route Registration + +All routes are registered in `routes/main_routes.py`. Each route is a Flask-RESTful `Resource` class with `get()`, `post()`, `put()`, `delete()` methods. 
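+A minimal sketch of that pattern (the resource body and return value here are illustrative, not the project's actual implementation):
+
+```python
+from flask import Flask, request
+from flask_restful import Api, Resource
+
+app = Flask(__name__)
+api = Api(app)
+
+
+class JobsDirect(Resource):
+    """Hypothetical stand-in for the resource behind POST /jobs/direct."""
+
+    def post(self):
+        payload = request.get_json()
+        # ... validate payload, create DB rows, dispatch Celery tasks ...
+        return {"job_id": "abc-123-def"}, 202
+
+
+api.add_resource(JobsDirect, "/jobs/direct")
+```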
+ +### Celery Workers + +Two Celery workers with separate queues: + +| Worker | Queue(s) | Responsibility | +|---|---|---| +| **simulation_worker** | `celery` | Run simulator binaries, monitor progress, merge results | +| **helper_worker** | `helper`, `helper-short` | Submit batch jobs to HPC, handle cleanup | + +Workers communicate back to Flask via HTTP callbacks (`POST /tasks`, `POST /results`). + +### Data Compression + +All large data (input files, simulation results, logs) is **gzip-compressed** before storage in PostgreSQL. The `compress()` and `decompress()` helpers handle this transparently. + +## Key Environment Variables + +| Variable | Description | +|---|---| +| `FLASK_SQLALCHEMY_DATABASE_URI` | PostgreSQL connection string | +| `CELERY_BROKER_URL` | Redis broker URL | +| `CELERY_RESULT_BACKEND` | Redis result backend URL | +| `BACKEND_INTERNAL_URL` | Flask URL for worker callbacks | +| `BACKEND_EXTERNAL_URL` | Public-facing URL | +| `FLASK_USE_CORS` | Enable CORS for local dev | +| `KEYCLOAK_BASE_URL` | Keycloak server URL | +| `KEYCLOAK_REALM` | Keycloak realm | +| `CERT_AUTH_URL` | PLGrid SSH cert service URL | +| `MAX_CORES` | CPU limit for simulation worker | +| `LOG_LEVEL_ROOT` | Logging verbosity | + +## Related Pages + +- [API Endpoints](/for_developers/backend/api-endpoints/) — walkthrough of all REST routes +- [Database](/for_developers/backend/database/) — data model and migrations +- [Simulation Lifecycle](/for_developers/backend/simulation-lifecycle/) — job state machine +- [Docker Deployment](/for_developers/backend/docker-deployment/) — containerized setup diff --git a/src/content/docs/backend/simulation-lifecycle.md b/src/content/docs/backend/simulation-lifecycle.md new file mode 100644 index 0000000..0f0793c --- /dev/null +++ b/src/content/docs/backend/simulation-lifecycle.md @@ -0,0 +1,190 @@ +--- +title: Simulation Lifecycle +description: Job and task state machines for YAPTIDE simulations. +--- + +Every simulation goes through a defined state machine. This page documents the states, transitions, and the differences between direct (Celery) and batch (Slurm) execution paths. + +## Job States + +``` +UNKNOWN ─────> PENDING ─────> RUNNING ─────> MERGING_QUEUED ─────> MERGING_RUNNING ─────> COMPLETED + │ │ │ + │ │ │ + └──────────────┴───────────────────────────────────────────────────────> FAILED + │ │ + └──────────────┴───────────────────────────────────────────────────────> CANCELED +``` + +| State | Description | +|---|---| +| `UNKNOWN` | Initial state, before any processing | +| `PENDING` | Job accepted, tasks being created | +| `RUNNING` | At least one task is actively simulating | +| `MERGING_QUEUED` | All tasks complete, merge task is waiting in the Celery queue | +| `MERGING_RUNNING` | Merge task is actively averaging results | +| `COMPLETED` | Results stored, job finished successfully | +| `FAILED` | One or more tasks failed, or the merge failed | +| `CANCELED` | User or system canceled the job | + +These states are defined in `utils/enums.py` as the `EntityState` enum. + +## Task States + +Each job contains N tasks (one per parallel simulation run). 
Tasks have their own state: + +| State | Description | +|---|---| +| `PENDING` | Task created, waiting for a worker | +| `RUNNING` | Simulator binary is executing | +| `COMPLETED` | Simulation finished, output available | +| `FAILED` | Simulator crashed or timed out | +| `CANCELED` | Task was revoked | + +## Direct Execution (Celery) + +### Submission + +```python +POST /jobs/direct + → Create CelerySimulationModel (state: PENDING) + → Create N CeleryTaskModel rows (state: PENDING) + → Convert editor JSON → simulator input files + → Dispatch Celery chord: + group(run_single_simulation × N) | get_job_results +``` + +### Task Execution + +Each `run_single_simulation` task: + +1. Receives input files and a task index +2. Creates a temporary directory +3. Writes input files +4. Spawns the simulator binary (`shieldhit`, `fluka_sim`) as a subprocess +5. Starts a **monitoring thread** that reads stdout/logfiles for progress +6. Periodically POSTs progress to `POST /tasks`: + ```json + { + "task_id": 0, + "simulated_primaries": 5000, + "requested_primaries": 10000, + "estimated_time": 42 + } + ``` +7. On completion, returns the output files (estimator data) + +### Merge Step + +When all N tasks complete, the `get_job_results` **callback task** runs: + +1. Collects estimator data from all N tasks +2. **Averages** the results (weighted by primaries per task) +3. Compresses and stores: `EstimatorModel` → `PageModel` +4. Updates job state to `COMPLETED` + +If any task fails, the merge is skipped and the job state is set to `FAILED`. + +### Cancellation + +```python +DELETE /jobs/direct?job_id= + → Revoke all Celery tasks (terminate=True) + → Set job state to CANCELED +``` + +### Task Time Limit + +Simulation tasks have a **10-hour hard time limit** (configured in the Celery worker). Tasks exceeding this are killed. + +## Batch Execution (Slurm via SSH) + +### Submission + +```python +POST /jobs/batch + → Create BatchSimulationModel (state: PENDING) + → Dispatch helper_worker.submit_job task +``` + +The `submit_job` task on the helper worker: + +1. Connects to the HPC cluster via **SSH** (using Fabric and the user's PLGrid SSH certificate from `KeycloakUserModel`) +2. Creates a remote working directory +3. Uploads: + - Compressed simulation input files + - A **watcher script** (monitors each array task) + - A **data-sender script** (POSTs results back to YAPTIDE) +4. Submits a **Slurm array job**: + ```bash + sbatch --array=0-N-1 run_simulation.sh + ``` +5. Submits a **collect job** (depends on the array job): + ```bash + sbatch --dependency=afterok: collect_results.sh + ``` +6. Stores the array and collect Slurm job IDs in `BatchSimulationModel` + +### Progress Monitoring + +The **watcher script** on the cluster: +- Runs alongside each array task +- Monitors simulator output (logfiles, stdout) +- POSTs progress updates to the YAPTIDE backend: + ``` + POST /tasks + Authorization: Bearer + ``` + +### Status Polling + +When the frontend polls `GET /jobs/batch?job_id=`, the backend: +1. Returns cached task states from the database +2. Optionally queries `sacct` on the cluster via SSH to update Slurm job status + +### Result Collection + +The **collect job** on the cluster: +1. Runs after all array tasks complete +2. Gathers output files from each task directory +3. Averages/merges results +4. POSTs the final results to `POST /results` +5. 
The backend stores them as `EstimatorModel` → `PageModel` + +### Cancellation + +```python +DELETE /jobs/batch?job_id= + → SSH to cluster + → scancel + → Set job state to CANCELED +``` + +## Worker Communication + +Both execution paths use **HTTP callbacks** for workers to report state back to Flask: + +| Endpoint | Who Calls It | Purpose | +|---|---|---| +| `POST /tasks` | Simulation worker / cluster watcher | Update task progress (primaries, estimated time) | +| `POST /results` | Merge task / collect job | Store final results | +| `POST /jobs` | Helper worker | Update job-level state | + +These internal endpoints are authenticated with a **simulation update key** — a 7-day JWT generated at job submission and stored (hashed) in the `SimulationModel`. + +## Polling Pattern + +The frontend polls for job status using this pattern: + +``` +1. POST /jobs/direct → { job_id } +2. Loop: + GET /jobs/direct?job_id= + → If RUNNING: show progress bars (primaries/estimated_time per task) + → If COMPLETED: GET /results?job_id= → render plots + → If FAILED: show error + → If CANCELED: show cancellation notice + Wait 2–5 seconds, repeat +``` + +The polling interval increases as the simulation runs longer to reduce server load. diff --git a/src/content/docs/backend/simulator-management.md b/src/content/docs/backend/simulator-management.md new file mode 100644 index 0000000..6e0dfa5 --- /dev/null +++ b/src/content/docs/backend/simulator-management.md @@ -0,0 +1,171 @@ +--- +title: Simulator Management +description: How simulator binaries are stored, encrypted, and deployed in YAPTIDE. +--- + +YAPTIDE runs external simulator binaries (SHIELD-HIT12A, FLUKA, TOPAS) as subprocesses. These binaries are managed through an S3-based storage and encryption system. + +## Architecture + +``` +S3 Bucket +├── shieldhit/ +│ └── shieldhit-encrypted.tar.gz ← AES-encrypted archive +├── fluka/ +│ └── fluka-encrypted.tar.gz +└── topas/ + └── topas-encrypted.tar.gz + + │ + │ Download at container startup + ▼ + +Simulation Worker Container +├── /usr/local/bin/shieldhit +├── /usr/local/bin/fluka_sim +└── /usr/local/bin/topas +``` + +## Demo vs Production + +| Mode | Source | Encryption | +|---|---|---| +| **Demo** | Downloaded from public SHIELD-HIT12A website | No encryption | +| **Production** | Downloaded from private S3 bucket | AES-encrypted with password + salt | + +The simulation worker startup script (`run_simulation_worker.sh`) checks for S3 credentials: +- If S3 credentials are set → download from S3, decrypt, extract +- If no S3 credentials → fall back to the public demo download + +## S3 Storage + +Encrypted simulator archives are stored in S3-compatible storage (e.g., AWS S3, MinIO). + +### Environment Variables + +| Variable | Description | +|---|---| +| `S3_ENDPOINT` | S3 endpoint URL | +| `S3_ACCESS_KEY` | S3 access key | +| `S3_SECRET_KEY` | S3 secret key | +| `S3_ENCRYPTION_PASSWORD` | AES encryption password | +| `S3_ENCRYPTION_SALT` | AES encryption salt | +| `S3_SHIELDHIT_BUCKET` | Bucket name for SHIELD-HIT12A | +| `S3_SHIELDHIT_KEY` | Object key for SHIELD-HIT12A archive | +| `S3_FLUKA_BUCKET` | Bucket name for FLUKA | +| `S3_FLUKA_KEY` | Object key for FLUKA archive | +| `S3_TOPAS_BUCKET` | Bucket name for TOPAS | +| `S3_TOPAS_KEY` | Object key for TOPAS archive | + +## CLI Commands + +The `yaptide.admin.simulators` module provides CLI commands for managing simulator binaries. 
+ +### Download + +```bash +# Download SHIELD-HIT12A from S3 (or demo fallback) +poetry run python -m yaptide.admin.simulators download-shieldhit + +# Download FLUKA from S3 +poetry run python -m yaptide.admin.simulators download-fluka + +# Download TOPAS from S3 +poetry run python -m yaptide.admin.simulators download-topas +``` + +### Encrypt + +Encrypt a local simulator binary for S3 upload: + +```bash +poetry run python -m yaptide.admin.simulators encrypt \ + --input /path/to/shieldhit \ + --output shieldhit-encrypted.tar.gz \ + --password "your-encryption-password" \ + --salt "your-encryption-salt" +``` + +### Decrypt + +Decrypt an archived binary (for testing): + +```bash +poetry run python -m yaptide.admin.simulators decrypt \ + --input shieldhit-encrypted.tar.gz \ + --output /path/to/shieldhit \ + --password "your-encryption-password" \ + --salt "your-encryption-salt" +``` + +## Uploading a New Simulator Version + +Step-by-step example for uploading a new SHIELD-HIT12A release: + +### 1. Obtain the Binary + +Get the new simulator binary (e.g., `shieldhit` Linux x86_64 ELF). + +### 2. Encrypt + +```bash +poetry run python -m yaptide.admin.simulators encrypt \ + --input ./shieldhit \ + --output shieldhit-v1.2.0-encrypted.tar.gz \ + --password "$S3_ENCRYPTION_PASSWORD" \ + --salt "$S3_ENCRYPTION_SALT" +``` + +### 3. Upload to S3 + +Use the AWS CLI, MinIO client, or any S3-compatible tool: + +```bash +aws s3 cp shieldhit-v1.2.0-encrypted.tar.gz \ + s3://$S3_SHIELDHIT_BUCKET/$S3_SHIELDHIT_KEY \ + --endpoint-url $S3_ENDPOINT +``` + +### 4. Verify + +Restart the simulation worker (or trigger a new container deployment). The worker downloads and decrypts the binary at startup: + +```bash +docker compose restart yaptide_simulation_worker +``` + +Check the worker logs: + +```bash +docker compose logs yaptide_simulation_worker | grep -i "shieldhit" +``` + +### 5. Test + +Submit a simulation and verify it runs successfully with the new binary version. + +## Container Startup Flow + +When the simulation worker container starts (`run_simulation_worker.sh`): + +``` +1. Check for S3 environment variables + ├── If set: + │ ├── Download encrypted archive from S3 + │ ├── Decrypt with password + salt + │ ├── Extract binary to /usr/local/bin/ + │ └── Set executable permissions + └── If not set: + ├── Download demo SHIELD-HIT12A from public URL + └── Extract to /usr/local/bin/ + +2. Verify binary is executable +3. Start Celery simulation worker +``` + +## Security + +- Binaries are **AES-encrypted** at rest in S3 +- Encryption credentials are passed as environment variables (not committed to code) +- The container never stores unencrypted binaries on persistent volumes — they exist only in the container's ephemeral filesystem +- S3 bucket policies should restrict access to the deployment pipeline only diff --git a/src/content/docs/backend/testing.md b/src/content/docs/backend/testing.md new file mode 100644 index 0000000..cf24c64 --- /dev/null +++ b/src/content/docs/backend/testing.md @@ -0,0 +1,186 @@ +--- +title: Backend Testing +description: Test structure, fixtures, and how to write tests for the YAPTIDE backend. +--- + +The backend uses **pytest** with an in-memory SQLite database and an in-memory Celery broker. No external services are required to run tests. 
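+A condensed sketch of how a fixture can wire those in-memory settings (the real fixture in `tests/conftest.py` builds the app through the `yaptide.application` factory; the config keys shown are the standard Flask-SQLAlchemy/Celery ones and may differ from the project's exact names):
+
+```python
+import pytest
+from flask import Flask
+
+
+@pytest.fixture
+def app():
+    app = Flask(__name__)
+    app.config.update(
+        TESTING=True,
+        SQLALCHEMY_DATABASE_URI="sqlite:///:memory:",  # no PostgreSQL required
+        CELERY_BROKER_URL="memory://",                 # no Redis required
+        CELERY_RESULT_BACKEND="cache+memory://",
+    )
+    return app
+```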
+ +## Running Tests + +```bash +cd yaptide +poetry run pytest +``` + +With verbose output: + +```bash +poetry run pytest -v +``` + +Run a specific test file: + +```bash +poetry run pytest tests/test_database.py +``` + +Run a specific test: + +```bash +poetry run pytest tests/test_database.py::test_create_user -v +``` + +## Test Configuration + +Test settings are defined in `pytest.ini`: + +```ini +[pytest] +testpaths = tests +python_files = test_*.py +python_functions = test_* +``` + +Key differences from production: +- **Database**: in-memory SQLite (no PostgreSQL required) +- **Celery broker**: `memory://` (no Redis required) +- **Celery backend**: `cache+memory://` (no Redis required) + +## Fixtures + +Fixtures are defined in `tests/conftest.py` and provide the test infrastructure. + +### `app` + +Creates a Flask application configured for testing: + +```python +@pytest.fixture +def app(): + # Creates app with SQLite in-memory DB + # Applies all migrations + # Returns the configured Flask app +``` + +### `client` + +A Flask test client for making HTTP requests: + +```python +@pytest.fixture +def client(app): + return app.test_client() +``` + +### Sample Data Fixtures + +Pre-built JSON payloads for simulation submission: + +| Fixture | Description | +|---|---| +| `shieldhit_editor_payload` | SHIELD-HIT12A simulation from editor JSON | +| `shieldhit_files_payload` | SHIELD-HIT12A simulation from raw input files | +| `fluka_editor_payload` | FLUKA simulation from editor JSON | +| `fluka_files_payload` | FLUKA simulation from raw input files | + +These fixtures load JSON from `tests/res/` resource files. + +## Test Categories + +### Unit Tests + +| File | What it Tests | +|---|---| +| `test_main.py` | App creation, health check endpoint (`GET /`) | +| `test_database.py` | SQLAlchemy model CRUD (create, read, update, delete) | +| `test_keycloak_tokens.py` | Keycloak token validation and parsing | +| `test_encrypt_decrypt.py` | AES encryption/decryption for simulator binaries | +| `test_prepare_simulation.py` | Input conversion logic (editor JSON → simulator files) | +| `test_download_shieldhit.py` | SHIELD-HIT12A binary download from S3 | +| `test_download_fluka.py` | FLUKA binary download from S3 | +| `test_download_topas.py` | TOPAS binary download from S3 | + +### Integration Tests + +Located in `tests/integration/`: + +| File | What it Tests | +|---|---| +| `test_run_simulation.py` | Full simulation lifecycle: submit → run → merge → retrieve results | +| `test_cancel_simulation.py` | Simulation cancellation flow | +| `test_user_management.py` | User registration, login, session management | +| `test_celery_tasks.py` | Celery task dispatch and completion | +| `test_simulation_deletion.py` | Simulation and result cleanup | +| `test_environment.py` | Environment variable handling | + +## Writing New Tests + +### Basic Pattern + +```python +def test_health_check(client): + """Test that the health check endpoint returns 200.""" + response = client.get("/") + assert response.status_code == 200 +``` + +### Authenticated Tests + +Most endpoints require authentication. Use the login fixture: + +```python +def test_submit_simulation(client): + # Register and log in + client.put("/auth/register", json={ + "username": "testuser", + "password": "testpass" + }) + client.post("/auth/login", json={ + "username": "testuser", + "password": "testpass" + }) + + # Now authenticated via cookies + response = client.post("/jobs/direct", json={ + "sim_data": { ... 
},
+        "ntasks": 1,
+        "sim_type": "shieldhit"
+    })
+    assert response.status_code == 202
+```
+
+### Database Tests
+
+```python
+def test_create_simulation(app):
+    with app.app_context():
+        from yaptide.persistence.database import db  # Flask-SQLAlchemy handle (import path assumed)
+        from yaptide.persistence.models import SimulationModel, UserModel
+
+        user = UserModel(username="test")
+        db.session.add(user)
+        db.session.commit()
+
+        sim = SimulationModel(
+            job_id="test-123",
+            user_id=user.id,
+            sim_type="shieldhit"
+        )
+        db.session.add(sim)
+        db.session.commit()
+
+        assert SimulationModel.query.count() == 1
+```
+
+## Pre-Commit Hooks
+
+The project uses pre-commit for code quality:
+
+```bash
+poetry run pre-commit install
+poetry run pre-commit run --all-files
+```
+
+Hooks include:
+- **YAPF** — Python formatter (120-char line length)
+- **pycodestyle** — PEP 8 style checking
+- Standard pre-commit hooks (trailing whitespace, end of file, etc.)
diff --git a/src/content/docs/contributing/code-style.md b/src/content/docs/contributing/code-style.md
new file mode 100644
index 0000000..fbbd2b5
--- /dev/null
+++ b/src/content/docs/contributing/code-style.md
@@ -0,0 +1,91 @@
+---
+title: Code Style
+description: Formatting and linting standards across YAPTIDE repositories.
+---
+
+Each repository enforces consistent code style through automated tools. CI will reject changes that don't conform.
+
+## General Conventions
+
+### Naming
+
+Use standard naming conventions for Python and TypeScript. Use a `_unit` suffix in time-related variables, e.g. `max_idle_seconds`.
+
+### Type Hints
+
+Use type hints on all public functions, both in Python and in TypeScript, e.g.:
+
+```py
+def get_user(id: int) -> User:
+```
+
+```ts
+export const getUser = (id: number): User => { ... }
+```
+
+### Comments
+
+Use **docstrings** or **JSDoc** for all complex functions and classes. Each docstring must include an `Args:` section that explicitly defines the purpose of every parameter, e.g.:
+
+```py
+def log_generator(thefile: TextIOWrapper,
+                  event: threading.Event = None,
+                  max_idle_seconds: float = 3600,
+                  polling_interval_seconds: float = 1) -> Iterator[str]:
+    """
+    Generator equivalent to the `tail -f` Linux command.
+    Yields new lines appended to the end of the file.
+    Its main purpose is monitoring of log files.
+
+    Args:
+        thefile: File object to read from.
+        event: Threading event to signal when to stop the generator.
+        max_idle_seconds: Maximum time to wait for new data before raising TimeoutError.
+        polling_interval_seconds: Interval between successive file polls while no new data is available.
+    """
+```
+
+:::note
+While older segments of the codebase may lack this detail, we are enforcing this format moving forward to improve readability and developer onboarding.
+::: + +## Python (Backend & Converter) + +### Pre-Commit Hooks + +Both Python repos use [pre-commit](https://pre-commit.com/) to run checks before every commit: + +```bash +poetry run pre-commit install # Install hooks (once) +poetry run pre-commit run --all-files # Run manually +``` + +The `.pre-commit-config.yaml` typically includes: + +| Hook | Purpose | +|---|---| +| `yapf` | Code formatting | +| `isort` | Import ordering | +| `trailing-whitespace` | Remove trailing spaces | +| `end-of-file-fixer` | Ensure newline at end of file | +| `check-yaml` | Validate YAML files | +| `check-merge-conflict` | Detect leftover merge markers | + +## TypeScript (Frontend) + +### Linter — ESLint + +The frontend uses [ESLint](https://eslint.org/) with a configuration extending CRA defaults: + +```bash +npm run lint # Check for issues +npm run lint -- --fix # Auto-fix where possible +``` + +### Formatter — Prettier + +[Prettier](https://prettier.io/) handles all formatting for TypeScript, JSON, CSS, and Markdown. To apply formatting use: + +```bash +npm run format +``` diff --git a/src/content/docs/contributing/glossary.md b/src/content/docs/contributing/glossary.md new file mode 100644 index 0000000..439ed10 --- /dev/null +++ b/src/content/docs/contributing/glossary.md @@ -0,0 +1,106 @@ +--- +title: Glossary +description: Key terms used throughout the YAPTIDE project. +--- + +Quick reference for domain-specific terms that appear across the codebase and documentation. + +## Architecture Terms + +**Project JSON** +: The canonical data format exchanged between the frontend editor and the backend/converter. Contains all simulation parameters: beam, geometry, materials, detectors, scoring, and physics settings. + +**Parser** +: A converter class that translates project JSON into a specific simulator's input format. Each engine has its own parser (e.g., `ShieldhitParser`, `FlukaParser`). + +**Direct Execution** +: Running simulations on the backend server itself, using Celery workers. Faster for small jobs. + +**Batch Execution** +: Submitting simulations to an HPC cluster (e.g., PLGrid/Prometheus) via SSH and Slurm. For large-scale computations. + +**Celery Worker** +: A background process that executes simulation tasks. YAPTIDE uses two types: **simulation workers** (run the simulator binary) and **helper workers** (poll batch job status). + +## Frontend Terms + +**YaptideEditor** +: The central TypeScript class that manages the 3D scene, all object managers, and the serialization state. Extends Three.js `EventDispatcher`. + +**Signal** +: A Three.js event used for communication between editor components. Similar to an event emitter pattern. + +**Command** +: An undoable action in the editor (add object, change property, etc.). All mutations go through the command system to support undo/redo. + +**ServiceTree** +: The top-level React component tree that provides all context providers (auth, simulation, backend connection) to the application. + +**Pyodide** +: A Python runtime compiled to WebAssembly. Used to run the YAPTIDE converter directly in the browser without a backend. + +## Simulation Engines + +**SHIELD-HIT12A** +: A Monte Carlo particle transport code specializing in ion beam therapy. The most complete engine in YAPTIDE. + +**FLUKA** +: A general-purpose Monte Carlo code for particle transport developed at CERN. Uses a card-based input format. + +**Geant4** +: A toolkit for simulating particle passage through matter, developed at CERN. Uses GDML for geometry and macro files for configuration. 
+
+**TOPAS**
+: A Monte Carlo tool built on Geant4, focused on proton therapy. Experimental support in YAPTIDE.
+
+## Infrastructure Terms
+
+**Poetry**
+: Python dependency manager used by the backend and converter. Replaces pip + requirements.txt with `pyproject.toml` + `poetry.lock`.
+
+**Keycloak**
+: An open-source identity provider. YAPTIDE uses it for SSO authentication, especially for PLGrid HPC access.
+
+**PLGrid**
+: Polish national computing infrastructure. YAPTIDE can submit batch simulations to PLGrid clusters via Slurm.
+
+**Slurm**
+: A workload manager for HPC clusters. YAPTIDE generates Slurm job scripts for batch execution.
+
+**GDML**
+: Geometry Description Markup Language — an XML schema used by Geant4 for geometry definitions.
+
+## Simulation Concepts
+
+**CSG (Constructive Solid Geometry)**
+: A modeling technique that builds complex shapes by combining simple primitives (boxes, cylinders, spheres) using boolean operations — union, subtraction, and intersection.
+
+**Zone**
+: A region of space defined by CSG operations on figures. Each zone is assigned a material. In FLUKA, zones are called "regions."
+
+**Figure**
+: A geometric primitive (box, cylinder, sphere) with position and rotation. Figures are the building blocks of zones.
+
+**Black-Hole Boundary**
+: A surrounding zone filled with "black hole" material that absorbs all particles exiting the simulation geometry. Auto-generated by the converter.
+
+**Primary / Primaries**
+: The initial particles in a simulation (e.g. protons, carbon ions). The number of primaries determines statistical accuracy.
+
+**Estimator / Detector**
+: A scoring mesh that tallies physical quantities (dose, fluence, LET) in a defined region of space. Types include mesh (Cartesian grid), cylindrical, and zone-based.
+
+**Scoring**
+: The process of recording (tallying) physical quantities during particle transport. A scoring definition ties a quantity to a detector.
+
+**Page / Output**
+: A single scored quantity on a single detector. One simulation can produce multiple pages of results.
+
+**LET (Linear Energy Transfer)**
+: Energy deposited by a charged particle per unit path length. Typically measured in keV/µm.
+
+**Fluence**
+: Number of particles passing through a unit area. Measured in particles/cm².
+
+**Dose**
+: Energy deposited per unit mass. Measured in Gray (Gy).
diff --git a/src/content/docs/contributing/guide.md b/src/content/docs/contributing/guide.md
new file mode 100644
index 0000000..2897a8e
--- /dev/null
+++ b/src/content/docs/contributing/guide.md
@@ -0,0 +1,67 @@
+---
+title: Contributing
+description: How to contribute to the YAPTIDE project.
+---
+
+YAPTIDE is spread across multiple repositories. This guide covers the shared workflow for contributing to any of them.
+
+## Repositories
+
+| Repository | Description |
+|---|---|
+| [`yaptide`](https://github.com/yaptide/yaptide) | Backend — Flask API, Celery workers, database |
+| [`ui`](https://github.com/yaptide/ui) | Frontend — React editor, Three.js viewport |
+| [`converter`](https://github.com/yaptide/converter) | Python converter — JSON to simulator input |
+
+## Pull Request Workflow
+
+### 1. Fork and Branch
+
+```bash
+git clone https://github.com/yaptide/<repository>.git
+cd <repository>
+git checkout -b feature/my-change
+```
+
+### 2. Make Changes
+
+- Write code following the [code style guide](/for_developers/contributing/code-style/).
+- Add or update tests.
+- Update documentation if behaviour changes.
+
+### 3. Push and Open PR
+
+```bash
+git push origin feature/my-change
+```
+
+Open a pull request on GitHub against `master`. Include:
+
+- **What** the change does (one sentence)
+- **Why** it's needed
+- **How** to test it
+- Link to any related issues
+
+### 4. Code Review
+
+- At least one approval is required before merge.
+- CI must pass: tests, linting, type checks.
+
+## Issue Tracking
+
+Issues are tracked in the respective GitHub repositories. When opening an issue:
+
+- Use a descriptive title.
+- Include steps to reproduce (for bugs).
+- Include the expected vs actual behaviour.
+- Tag with appropriate labels (`bug`, `enhancement`, `docs`).
+
+## CI / CD
+
+Each repository has GitHub Actions workflows that run on pull requests:
+
+| Check | Backend | Frontend | Converter |
+|---|---|---|---|
+| Unit tests | `pytest` | `jest` | `pytest` |
+| Linting | `pre-commit` | `eslint` | `pre-commit` |
+| Build | Docker image | `npm run build` | — |
diff --git a/src/content/docs/converter/adding-a-simulator.md b/src/content/docs/converter/adding-a-simulator.md
new file mode 100644
index 0000000..d05ec20
--- /dev/null
+++ b/src/content/docs/converter/adding-a-simulator.md
@@ -0,0 +1,222 @@
+---
+title: Adding a Simulator
+description: How-to guide for implementing a new simulation engine backend.
+---
+
+This guide walks through adding support for a new Monte Carlo simulation engine to the YAPTIDE converter. You will create a parser that translates the editor JSON into your engine's input format.
+
+## Prerequisites
+
+- Familiarity with the [conversion flow](/for_developers/converter/conversion-flow/)
+- Working dev setup (see [converter dev setup](/for_developers/get-started/dev-setup-converter/))
+- Understanding of your target engine's input file format
+
+## Step 1 — Create the Parser Directory
+
+Add a new directory under `converter/converter/`:
+
+```
+converter/converter/
+└── mynewengine/
+    ├── __init__.py
+    └── parser.py
+```
+
+## Step 2 — Implement the Parser Class
+
+Create `parser.py` and extend the `Parser` base class:
+
+```python
+"""Parser for MyNewEngine input format."""
+
+from pathlib import Path
+from converter.common import Parser
+
+
+class MyNewEngineParser(Parser):
+    """Translates YAPTIDE editor JSON into MyNewEngine input files."""
+
+    def __init__(self):
+        self.beam_config = None
+        self.geo_config = None
+        # Add whatever internal state you need
+
+    def parse_configs(self, json_data: dict) -> None:
+        """Parse the editor JSON and populate internal state."""
+        # Extract from json_data following the project JSON schema:
+        # json_data["beam"] → beam parameters
+        # json_data["figureManager"]["figures"] → geometry primitives
+        # json_data["zoneManager"]["zones"] → CSG zone operations
+        # json_data["materialManager"]["materials"] → material definitions
+        # json_data["detectorManager"]["detectors"] → scoring detectors
+        # json_data["scoringManager"]["outputs"] → scored quantities
+        self.beam_config = self._parse_beam(json_data.get("beam", {}))
+        self.geo_config = self._parse_geometry(json_data)
+
+    def save_configs(self, output_dir: Path) -> None:
+        """Write generated files to the output directory."""
+        output_dir.mkdir(parents=True, exist_ok=True)
+
+        beam_file = output_dir / "beam_config.inp"
+        beam_file.write_text(self._render_beam())
+
+        geo_file = output_dir / "geometry.inp"
+        geo_file.write_text(self._render_geometry())
+
+    def get_configs_json(self) -> dict:
+        """Return {filename: content} dict without writing files."""
+        return {
+            "beam_config.inp": self._render_beam(),
+ "geometry.inp": self._render_geometry(), + } + + # --- Private helpers --- + + def _parse_beam(self, beam_data: dict): + """Convert beam JSON to internal representation.""" + # Implement based on your engine's requirements + pass + + def _parse_geometry(self, json_data: dict): + """Convert figures + zones to internal representation.""" + pass + + def _render_beam(self) -> str: + """Render beam config as a string in engine's format.""" + pass + + def _render_geometry(self) -> str: + """Render geometry as a string in engine's format.""" + pass +``` + +### Key Points + +- **`parse_configs`** receives the full project JSON. Extract what you need. +- **`save_configs`** writes files to a directory (used by the CLI and workers). +- **`get_configs_json`** returns a dictionary of filename → file content (used by the API and Pyodide preview). +- Both output methods must produce **identical** content. + +## Step 3 — Register the Parser + +Open `converter/api.py` and add your parser to the factory function: + +```python +from converter.mynewengine.parser import MyNewEngineParser + +def get_parser_from_str(parser_type: str) -> Parser: + if parser_type == "shieldhit": + return ShieldhitParser() + elif parser_type == "fluka": + return FlukaParser() + elif parser_type == "geant4": + return Geant4Parser() + elif parser_type == "topas": + return TopasParser() + elif parser_type == "mynewengine": # Add this + return MyNewEngineParser() + else: + raise ValueError(f"Unknown parser type: {parser_type}") +``` + +## Step 4 — Add Golden-File Tests + +Create test files and expected output in the test directory: + +``` +tests/mynewengine/ +├── __init__.py +├── test_mynewengine.py +└── test_data/ + ├── sample_project.json # Input project JSON + ├── expected_beam_config.inp # Expected beam output + └── expected_geometry.inp # Expected geometry output +``` + +Write the test: + +```python +"""Golden-file tests for MyNewEngine parser.""" + +import json +from pathlib import Path + +from converter.api import get_parser_from_str + +TEST_DIR = Path(__file__).parent / "test_data" + + +def test_beam_output(): + """Verify beam config matches golden file.""" + with open(TEST_DIR / "sample_project.json") as f: + project = json.load(f) + + parser = get_parser_from_str("mynewengine") + parser.parse_configs(project) + configs = parser.get_configs_json() + + expected = (TEST_DIR / "expected_beam_config.inp").read_text() + assert configs["beam_config.inp"] == expected + + +def test_geometry_output(): + """Verify geometry matches golden file.""" + with open(TEST_DIR / "sample_project.json") as f: + project = json.load(f) + + parser = get_parser_from_str("mynewengine") + parser.parse_configs(project) + configs = parser.get_configs_json() + + expected = (TEST_DIR / "expected_geometry.inp").read_text() + assert configs["geometry.inp"] == expected +``` + +Run tests: + +```bash +cd converter +poetry run pytest tests/mynewengine/ -v +``` + +## Step 5 — Handle Geometry Correctly + +The trickiest part of any converter is geometry. YAPTIDE uses a CSG (Constructive Solid Geometry) model: + +1. **Figures** define primitives (boxes, cylinders, spheres) with position and rotation. +2. **Zones** combine figures using boolean operations — a zone is defined as an intersection of included figures minus excluded figures. +3. Each zone is assigned a **material**. +4. A **black-hole boundary** surrounds the entire geometry. The converter must auto-generate this. 
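+A minimal sketch of that zone model (the dataclass and rendering below are illustrative, not the converter's actual types; the `+`/`-` expression style mirrors the FLUKA region syntax described on the FLUKA page):
+
+```python
+from dataclasses import dataclass, field
+
+
+@dataclass
+class Zone:
+    """Hypothetical mirror of a zoneManager entry: an intersection of
+    included figures minus the excluded ones."""
+    name: str
+    include: list[str] = field(default_factory=list)
+    exclude: list[str] = field(default_factory=list)
+
+
+def render_zone(zone: Zone) -> str:
+    # One operand per figure: +name = inside the figure, -name = outside it
+    parts = [f"+{fig}" for fig in zone.include] + [f"-{fig}" for fig in zone.exclude]
+    return " ".join(parts)
+
+
+print(render_zone(Zone("target", include=["water"], exclude=["cavity"])))
+# +water -cavity
+```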
+ +Use the existing `SolidFigure` classes from `converter/solid_figures.py`: + +```python +from converter.solid_figures import parse_figure + +# Parse a figure dict from the JSON +figure = parse_figure(figure_json) +# Returns a BoxFigure, CylinderFigure, or SphereFigure instance + +# Expand for black-hole boundary +expanded = figure.expand(margin=10.0) +``` + +Study the SHIELD-HIT12A parser's geometry handling as the most complete reference implementation. + +## Step 6 — Update Documentation + +1. Add your engine to the supported engines table in the converter overview. +2. Create an engine-specific page under `converter/` following the pattern of existing engine docs. +3. Add the page to the sidebar in `astro.config.mjs`. + +## Checklist + +- [ ] Parser class extends `Parser` with all three abstract methods +- [ ] Registered in `get_parser_from_str()` factory +- [ ] Golden-file tests for all output files +- [ ] Black-hole boundary generation +- [ ] Material assignment to zones +- [ ] Beam configuration +- [ ] Detector/scoring handling (if the engine supports it) +- [ ] CLI tested: `converter -i test.json -o mynewengine -d out/` +- [ ] Documentation updated diff --git a/src/content/docs/converter/conversion-flow.md b/src/content/docs/converter/conversion-flow.md new file mode 100644 index 0000000..7d4e57e --- /dev/null +++ b/src/content/docs/converter/conversion-flow.md @@ -0,0 +1,175 @@ +--- +title: Conversion Flow +description: End-to-end walkthrough of how editor JSON becomes simulator input. +--- + +The conversion pipeline is a four-stage process: **resolve a parser → parse JSON → extract files → write output**. Every simulator follows this exact pipeline. + +## Pipeline Overview + +``` +JSON payload + │ + ▼ +get_parser_from_str("shieldhit") ──> ShieldhitParser instance + │ + ▼ +parser.parse_configs(json_data) ──> internal dataclasses populated + │ + ▼ +parser.get_configs_json() ──> { "beam.dat": "...", "geo.dat": "...", ... } + │ OR +parser.save_configs(output_dir) ──> files written to disk +``` + +## Stage 1 — Resolve the Parser + +`converter/api.py` exposes the factory function: + +```python +from converter.api import get_parser_from_str + +parser = get_parser_from_str("shieldhit") +# Returns a ShieldhitParser instance +``` + +The mapping is straightforward: + +| String | Parser Class | +|---|---| +| `"shieldhit"` | `ShieldhitParser` | +| `"fluka"` | `FlukaParser` | +| `"geant4"` | `Geant4Parser` | +| `"topas"` | `TopasParser` | + +An unrecognised string raises `ValueError`. + +## Stage 2 — Parse the JSON + +```python +parser.parse_configs(json_data) +``` + +This is where the real work happens. The parser walks through the JSON payload and populates internal data structures: + +1. **Beam** — energy, particle type, shape, divergence +2. **Materials** — element composition, density, from a material library +3. **Figures** — 3D geometry primitives (box, cylinder, sphere) with position/rotation +4. **Zones** — boolean CSG operations on figures (union, subtraction, intersection) +5. **Detectors** — scoring meshes (mesh, cylinder, zone) +6. **Scoring** — quantities to score (dose, fluence, LET, etc.) tied to detectors +7. **Physics** — delta-ray production, energy thresholds, nuclear reactions + +The JSON keys correspond to the top-level groups described in the [project JSON schema](/for_developers/architecture/project-json-schema/). 
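+For orientation, a heavily trimmed sketch of that payload shape. Only the top-level keys shown are taken from the schema (the physics group is omitted here); everything elided with `...` is project-specific:
+
+```python
+json_data = {
+    "beam": {...},                            # energy, particle, shape, divergence
+    "materialManager": {"materials": [...]},
+    "figureManager": {"figures": [...]},      # boxes, cylinders, spheres
+    "zoneManager": {"zones": [...]},          # CSG operations on figures
+    "detectorManager": {"detectors": [...]},
+    "scoringManager": {"outputs": [...]},     # quantities tied to detectors
+}
+```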
+ +## Stage 3 — Extract Output + +Two extraction modes exist: + +### In-memory (for frontend / API) + +```python +files: dict = parser.get_configs_json() +# Returns {"beam.dat": "file contents...", "geo.dat": "file contents...", ...} +``` + +This is used by: +- The backend API when returning files for preview +- The Pyodide converter running in the browser + +### To disk (for CLI / workers) + +```python +from pathlib import Path +parser.save_configs(Path("output/")) +``` + +This creates the files on the filesystem. The simulation worker uses this mode before handing the directory to the simulator binary. + +## Stage 4 — What Gets Written + +Each engine produces a different set of files. See the engine-specific pages for full format details. + +### SHIELD-HIT12A + +| File | Content | +|---|---| +| `beam.dat` | Beam parameters (energy, particle, shape, direction) | +| `mat.dat` | Material definitions (ICRU numbers, custom compositions) | +| `geo.dat` | Geometry (figures → zones → medium assignments, black-hole boundary) | +| `detect.dat` | Scoring definitions (detectors, quantities, filters, output units) | + +### FLUKA + +| File | Content | +|---|---| +| `fl_sim.inp` | Single monolithic input file containing all configuration as "cards" | + +### Geant4 + +| File | Content | +|---|---| +| `geometry.gdml` | Geometry Description Markup Language file | +| `run.mac` | Geant4 macro file (beam, physics, scoring) | + +## Entry Points + +### CLI + +Run the converter from the command line: + +```bash +converter --help +converter -i project.json -o shieldhit -d output/ +``` + +| Argument | Description | +|---|---| +| `-i`, `--input` | Path to a YAPTIDE project JSON file | +| `-o`, `--output_type` | Target simulator (`shieldhit`, `fluka`, `geant4`, `topas`) | +| `-d`, `--output_dir` | Output directory for generated files | + +### Python API + +Import and call directly: + +```python +import json +from converter.api import get_parser_from_str + +with open("project.json") as f: + data = json.load(f) + +parser = get_parser_from_str("shieldhit") +parser.parse_configs(data) +files = parser.get_configs_json() + +for name, content in files.items(): + print(f"=== {name} ===") + print(content) +``` + +### Backend Integration + +The backend calls the converter before dispatching simulation tasks: + +```python +# In the simulation job submission flow +parser = get_parser_from_str(simulator_type) +parser.parse_configs(payload_dict) +configs = parser.get_configs_json() +# configs sent to Celery worker alongside simulator binary +``` + +## Error Handling + +Common conversion failures: + +| Scenario | Error | +|---|---| +| Unknown simulator string | `ValueError` | +| Missing required JSON key | `KeyError` with descriptive message | +| Invalid geometry (e.g. zone references missing figure) | `ValueError` with zone/figure context | +| Unsupported feature for target engine | `NotImplementedError` | + +The converter does **not** validate physics plausibility — it only checks structural correctness of the input JSON. diff --git a/src/content/docs/converter/fluka.md b/src/content/docs/converter/fluka.md new file mode 100644 index 0000000..d77bc58 --- /dev/null +++ b/src/content/docs/converter/fluka.md @@ -0,0 +1,199 @@ +--- +title: FLUKA +description: Converter internals for the FLUKA simulation engine. +--- + +The FLUKA converter produces a single monolithic input file (`fl_sim.inp`) containing all simulation parameters encoded as **cards** — FLUKA's fixed-format configuration units. 
+ +## Output Files + +| File | Purpose | +|---|---| +| `fl_sim.inp` | Complete FLUKA input file with all cards | + +## FLUKA Card System + +FLUKA uses a card-based input format. Each card is an 80-character line specifying a configuration directive: + +``` +BEAM -0.150 0.0 0.0 0.0 0.0 0.0PROTON +BEAMPOS 0.0 0.0 -50.0 0.0 0.0 +GEOBEGIN COMBNAME + 0 0 +RPP BH -600.0 600.0 -600.0 600.0 -600.0 600.0 +RPP water -50.0 50.0 -50.0 50.0 0.0 100.0 +END +BH 5 +BH -water +target 5 +water +END +GEOEND +ASSIGNMA BLCKHOLE BH +ASSIGNMA WATER target +USRBIN 10. DOSE -21. 50.0 50.0 100.0bin1 +USRBIN -50.0 -50.0 0.0 1 1 400 & +RANDOMIZ 1. 1. +START 10000.0 +STOP +``` + +## Card Types + +The converter generates these card groups: + +| Card | Purpose | +|---|---| +| `BEAM` / `BEAMPOS` | Particle type, energy, beam position and direction | +| `GEOBEGIN` / `GEOEND` | Geometry block containing figures and zones | +| `RPP`, `RCC`, `SPH` | Geometry primitives (box, cylinder, sphere) | +| `ASSIGNMA` | Material-to-zone assignments | +| `USRBIN` | Scoring detectors (mesh binnings) | +| `RANDOMIZ` | Random number seed | +| `START` | Number of primary particles | +| `STOP` | End-of-input marker | + +## Parser Internals + +### Module Structure + +``` +fluka/ +├── __init__.py +├── parser.py # FlukaParser orchestration +├── input.py # FlukaInput dataclass—holds all parsed data +├── cards/ # Individual card generators +│ ├── __init__.py +│ ├── beam_card.py +│ ├── figure_card.py +│ ├── material_card.py +│ ├── region_card.py +│ ├── scoring_card.py +│ └── ... +└── helper_parsers/ # JSON → internal representation + ├── __init__.py + ├── beam_parser.py + ├── figure_parser.py + ├── material_parser.py + ├── region_parser.py + └── scoring_parser.py +``` + +### Architecture + +The FLUKA converter has a clean two-phase design: + +**Phase 1 — Parse** (helper_parsers) + +``` +Editor JSON → helper_parsers → FlukaInput dataclass +``` + +Each helper parser extracts its domain from the JSON and populates the shared `FlukaInput` object. + +**Phase 2 — Render** (cards) + +``` +FlukaInput → card generators → formatted card strings → fl_sim.inp +``` + +Each card generator takes the parsed data and produces correctly formatted 80-character card lines. 
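+The fixed-width formatting is the essence of every card generator. A minimal sketch of such a helper, assuming the classic FLUKA layout of a 10-character keyword, six right-aligned 10-character numeric fields, and a trailing SDUM string (the function name and formatting details are illustrative, not the project's actual code):
+
+```python
+def format_card(codewd: str, whats: list[float], sdum: str = "") -> str:
+    """Render one fixed-format FLUKA card line."""
+    padded = (whats + [0.0] * 6)[:6]                 # always six WHAT fields
+    body = "".join(f"{w:>10.3f}" for w in padded)    # 6 x 10 characters
+    return f"{codewd:<10}{body}{sdum}"
+
+
+print(format_card("BEAM", [-0.150], sdum="PROTON"))
+# BEAM          -0.150     0.000     0.000     0.000     0.000     0.000PROTON
+```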
+ +### FlukaInput Dataclass + +Central data container holding all parsed configuration: + +```python +@dataclass +class FlukaInput: + beam: BeamData + figures: list[FlukaFigure] + regions: list[FlukaRegion] + materials: list[FlukaMaterial] + scorings: list[FlukaScoring] + settings: SimulationSettings +``` + +### Parsing Flow + +```python +class FlukaParser(Parser): + def parse_configs(self, json_data): + self.input = FlukaInput() + + # Phase 1: parse JSON into internal representation + parse_beam(json_data, self.input) + parse_figures(json_data, self.input) + parse_regions(json_data, self.input) + parse_materials(json_data, self.input) + parse_scorings(json_data, self.input) + + def get_configs_json(self): + # Phase 2: render to card format + lines = [] + lines += render_beam_cards(self.input) + lines += render_geometry_block(self.input) + lines += render_material_cards(self.input) + lines += render_scoring_cards(self.input) + lines += render_control_cards(self.input) + + return {"fl_sim.inp": "\n".join(lines)} +``` + +## Geometry Mapping + +FLUKA uses the same CSG approach as SHIELD-HIT12A but with FLUKA-specific keywords: + +| Editor Figure | FLUKA Primitive | Parameters | +|---|---|---| +| Box | `RPP` | xmin, xmax, ymin, ymax, zmin, zmax | +| Cylinder | `RCC` | center, axis vector, radius | +| Sphere | `SPH` | center, radius | + +### Zone → Region + +FLUKA calls zones "regions." The CSG syntax is similar: + +``` +target 5 +water +wrapper 5 +BH -water +``` + +- `+name` — inside the named body +- `-name` — outside the named body +- The `5` is the number of operators in the expression + +The converter maps YAPTIDE zone operations to FLUKA region expressions. + +### Black-Hole Boundary + +A `BLCKHOLE` region wraps the entire geometry. The converter expands the bounding box of all figures and creates an `RPP` enclosure, assigned the `BLCKHOLE` material. + +## Material Mapping + +FLUKA uses predefined material names. The converter maps YAPTIDE materials to FLUKA's built-in material library: + +| YAPTIDE Material | FLUKA Material | +|---|---| +| Water | `WATER` | +| Air | `AIR` | +| Aluminum | `ALUMINUM` | +| Custom | Defined via `MATERIAL` + `COMPOUND` cards | + +## Scoring + +The converter maps YAPTIDE detector types to FLUKA's `USRBIN` cards: + +| YAPTIDE Quantity | FLUKA Scoring | +|---|---| +| Dose | `DOSE` | +| Fluence | `FLUENCE` | +| LET | Requires post-processing | + +> **Note:** FLUKA's scoring capabilities differ from SHIELD-HIT12A. Not all YAPTIDE scoring options have direct FLUKA equivalents. + +## Common Issues + +| Problem | Cause | Fix | +|---|---|---| +| Card alignment errors | Lines not padded to 80 characters | Check card generators' formatting | +| Unknown material | YAPTIDE material has no FLUKA mapping | Add mapping or use `MATERIAL` + `COMPOUND` cards | +| Region expression too long | Complex CSG zone exceeds single-line limit | Check parenthesization and line continuation | diff --git a/src/content/docs/converter/geant4.md b/src/content/docs/converter/geant4.md new file mode 100644 index 0000000..b8b0946 --- /dev/null +++ b/src/content/docs/converter/geant4.md @@ -0,0 +1,189 @@ +--- +title: Geant4 +description: Converter internals for the Geant4 simulation engine. +--- + +The Geant4 converter produces two files: a **GDML** geometry file and a **macro** file that configures the beam, physics, and scoring. 

## Output Files

| File | Purpose | Format |
|---|---|---|
| `geometry.gdml` | Geometry definition | XML (Geometry Description Markup Language) |
| `run.mac` | Simulation parameters | Geant4 macro commands |

## GDML Output

GDML (Geometry Description Markup Language) is an XML schema for describing detector geometries. The converter generates a valid GDML file that Geant4 loads at runtime.

### Structure

A representative, abridged document for a single water target placed in a world volume (names and values are illustrative):

```xml
<?xml version="1.0" encoding="UTF-8"?>
<gdml xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
  <define>
    <position name="targetPos" unit="cm" x="0" y="0" z="50"/>
    <rotation name="targetRot" unit="deg" x="0" y="0" z="0"/>
  </define>

  <!-- materials: built-in G4_* NIST names are referenced directly below -->

  <solids>
    <box name="worldSolid" lunit="cm" x="400" y="400" z="400"/>
    <box name="targetSolid" lunit="cm" x="50" y="50" z="50"/>
  </solids>

  <structure>
    <volume name="targetLogical">
      <materialref ref="G4_WATER"/>
      <solidref ref="targetSolid"/>
    </volume>
    <volume name="worldLogical">
      <materialref ref="G4_AIR"/>
      <solidref ref="worldSolid"/>
      <physvol>
        <volumeref ref="targetLogical"/>
        <positionref ref="targetPos"/>
        <rotationref ref="targetRot"/>
      </physvol>
    </volume>
  </structure>

  <setup name="Default" version="1.0">
    <world ref="worldLogical"/>
  </setup>
</gdml>
```

### Figure Mapping

| Editor Figure | GDML Solid | Attributes |
|---|---|---|
| Box | `<box>` | `x`, `y`, `z` (half-lengths) |
| Cylinder | `<tube>` | `rmin`, `rmax`, `z`, `startphi`, `deltaphi` |
| Sphere | `<orb>` | `r` |

> **Important:** GDML uses **half-lengths** for box dimensions. The converter divides the editor's full dimensions by 2.

### Volume Hierarchy

GDML organizes geometry as a tree of volumes:

1. **World volume** — outermost bounding box (auto-generated)
2. **Physical volumes** — placed inside the world with position and rotation
3. **Logical volumes** — link a solid shape to a material

The converter flattens the editor's zone-based CSG into GDML's volume hierarchy. Each zone becomes a physical volume placed in the world.

> **Note:** Geant4's GDML supports boolean solids (`<union>`, `<subtraction>`, `<intersection>`) for CSG. The converter may use these for complex zone definitions, but simple zones map directly to placed volumes.

## Macro Output

The macro file configures everything that isn't geometry:

```
/run/initialize
/gun/particle proton
/gun/energy 150 MeV
/gun/position 0 0 -50 cm
/gun/direction 0 0 1
/run/beamOn 10000
```

### Macro Sections

| Section | Commands | Purpose |
|---|---|---|
| Initialization | `/run/initialize` | Initialize the Geant4 kernel |
| Beam | `/gun/particle`, `/gun/energy`, `/gun/position`, `/gun/direction` | Primary particle source |
| Physics | Physics list configuration | Energy thresholds, models |
| Scoring | `/score/create/...`, `/score/quantity/...` | Detector meshes and quantities |
| Execution | `/run/beamOn N` | Start simulation with N primaries |

## Parser Internals

### Module Structure

```
geant4/
├── __init__.py
├── parser.py # Geant4Parser orchestration
└── ...
```

### Parsing Flow

```python
class Geant4Parser(Parser):
    def parse_configs(self, json_data):
        # Parse beam, geometry, materials, scoring from JSON
        self._parse_beam(json_data)
        self._parse_geometry(json_data)
        self._parse_materials(json_data)
        self._parse_scoring(json_data)

    def get_configs_json(self):
        return {
            "geometry.gdml": self._render_gdml(),
            "run.mac": self._render_macro(),
        }
```

The Geant4 parser is more compact than SHIELD-HIT12A or FLUKA because:
- GDML is XML (structured, not fixed-width)
- The macro format is line-oriented commands (no column alignment needed)

### GDML Rendering

The converter builds the GDML XML using string templates (not an XML library). Sections are rendered in order:

1. **`<define>`** — positions and rotations for each figure
2. **`<materials>`** — material references (Geant4 built-in names like `G4_WATER`)
3. **`<solids>`** — one solid element per figure, plus the world solid
4. **`<structure>`** — logical volumes (solid + material) and physical volumes (placement)
5. **`<setup>`** — points to the world volume

### Material Mapping

Geant4 uses the NIST material database. 
The converter maps YAPTIDE materials to Geant4 names: + +| YAPTIDE Material | Geant4 Name | +|---|---| +| Water | `G4_WATER` | +| Air | `G4_AIR` | +| Aluminum | `G4_Al` | +| Vacuum | `G4_Galactic` | +| Custom | Defined inline with element composition | + +## WebAssembly Context + +When running via the Geant4 WebAssembly build in the browser, the converter's output is **consumed directly** by the in-browser Geant4 instance. The files are passed as in-memory strings — never written to disk. + +See [Geant4 WebAssembly](/for_developers/frontend/geant4-wasm/) for the browser execution flow. + +## Limitations + +| Limitation | Detail | +|---|---| +| **CSG complexity** | Complex boolean zone operations may not translate cleanly to GDML volume hierarchy | +| **Scoring** | Not all YAPTIDE scoring quantities have Geant4 equivalents | +| **Physics lists** | Limited physics list configuration compared to native Geant4 macros | +| **Rotation** | Nested rotations require careful coordinate transformation | + +## Common Issues + +| Problem | Cause | Fix | +|---|---|---| +| Invalid GDML schema | Malformed XML output | Check GDML rendering for unclosed tags | +| Volume overlap | Placed volumes intersect without boolean subtraction | Verify zone-to-volume mapping | +| Wrong units | GDML defaults may differ from editor conventions | Ensure `lunit="cm"` and `aunit="deg"` are set | +| Missing material | Material name not in Geant4 NIST database | Map to correct `G4_*` name or define custom | diff --git a/src/content/docs/converter/overview.md b/src/content/docs/converter/overview.md new file mode 100644 index 0000000..3a06548 --- /dev/null +++ b/src/content/docs/converter/overview.md @@ -0,0 +1,120 @@ +--- +title: Converter Overview +description: Architecture of the YAPTIDE format converter. +--- + +The converter is a standalone Python package that translates the editor's JSON project format into native input files for Monte Carlo particle transport simulators. + +## Purpose + +The YAPTIDE UI produces a JSON description of the simulation. Simulators like SHIELD-HIT12A, FLUKA, and Geant4 each expect their own specific input format. The converter bridges this gap. + +``` +Editor JSON ──> Converter ──> Simulator-specific input files +``` + +## Supported Engines + +| Engine | Parser Class | Output Files | Maturity | +|---|---|---|---| +| **SHIELD-HIT12A** | `ShieldhitParser` | `beam.dat`, `mat.dat`, `geo.dat`, `detect.dat` | Most complete | +| **FLUKA** | `FlukaParser` | `fl_sim.inp` | Solid | +| **Geant4** | `Geant4Parser` | `geometry.gdml`, `run.mac` | Good | +| **TOPAS** | `TopasParser` | `topas_config.txt` | Minimal / experimental | + +## Where the Converter Runs + +The converter is used in **two contexts**: + +| Context | How | When | +|---|---|---| +| **Backend** | Imported as a Python library by the Flask app | Server-side conversion before dispatching simulations to workers | +| **Frontend** | Compiled to WebAssembly via Pyodide, running in a Web Worker | Input file preview and Geant4 local simulations | + +Both contexts use the exact same codebase. 
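
Concretely, both contexts drive the same three calls on the converter's public API. A minimal sketch (the calls below are the ones exercised by the converter's test suite; the `project.json` path is an example):

```python
import json

from converter.api import get_parser_from_str

with open("project.json") as f:  # an editor project export
    editor_json = json.load(f)

parser = get_parser_from_str("shieldhit")  # or "fluka", "geant4", "topas"
parser.parse_configs(editor_json)

# {filename: content}, e.g. {"beam.dat": "...", "geo.dat": "...", ...}
files = parser.get_configs_json()
```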
+ +## Core Abstractions + +### `Parser` Base Class + +All parsers inherit from the abstract `Parser` class in `converter/common.py`: + +```python +class Parser(ABC): + @abstractmethod + def parse_configs(self, json_data: dict) -> None: + """Parse the editor JSON and populate internal dataclasses.""" + pass + + @abstractmethod + def save_configs(self, output_dir: Path) -> None: + """Write generated files to disk.""" + pass + + @abstractmethod + def get_configs_json(self) -> dict: + """Return {filename: content} dict without writing to disk.""" + pass +``` + +### `SolidFigure` Hierarchy + +`converter/solid_figures.py` defines the geometry primitives: + +``` +SolidFigure (ABC) +├── BoxFigure (xLength, yLength, zLength) +├── CylinderFigure (radius, height) +└── SphereFigure (radius) +``` + +Each figure has position, rotation, and an `expand(margin)` method for generating the world-zone black-hole boundary. + +### Utility Functions + +`converter/common.py` provides: + +| Function | Purpose | +|---|---| +| `format_float(number, n)` | Format float with n decimal places (for fixed-width file formats) | +| `rotate(vector, angles)` | 3D Tait-Bryan rotation | +| `convert_beam_energy()` | MeV ↔ MeV/nucl based on particle type | + +## Directory Structure + +``` +converter/ +├── converter/ +│ ├── __init__.py +│ ├── api.py # Public API +│ ├── common.py # Parser base class, utilities +│ ├── main.py # CLI entry point +│ ├── solid_figures.py # Geometry primitives +│ ├── shieldhit/ # SHIELD-HIT12A parser +│ │ ├── __init__.py +│ │ ├── parser.py # ShieldhitParser +│ │ ├── beam.py # BeamConfig → beam.dat +│ │ ├── geo.py # GeoMatConfig → geo.dat + mat.dat +│ │ ├── detect.py # DetectConfig → detect.dat +│ │ └── ... +│ ├── fluka/ # FLUKA parser +│ │ ├── __init__.py +│ │ ├── parser.py # FlukaParser +│ │ ├── input.py # Input dataclass +│ │ ├── cards/ # Card generators (beam, figures, regions...) +│ │ └── helper_parsers/ # JSON → card parsing logic +│ ├── geant4/ # Geant4 parser +│ │ ├── __init__.py +│ │ ├── parser.py # Geant4Parser (GDML + macro) +│ │ └── ... +│ └── topas/ # TOPAS parser +│ ├── __init__.py +│ └── parser.py # TopasParser (minimal) +└── tests/ +``` + +## Related Pages + +- [Conversion Flow](/for_developers/converter/conversion-flow/) — step-by-step parsing pipeline +- [Adding a Simulator](/for_developers/converter/adding-a-simulator/) — how to add a new engine +- Engine-specific docs: [SHIELD-HIT12A](/for_developers/converter/shieldhit/), [FLUKA](/for_developers/converter/fluka/), [Geant4](/for_developers/converter/geant4/) diff --git a/src/content/docs/converter/shieldhit.md b/src/content/docs/converter/shieldhit.md new file mode 100644 index 0000000..4796659 --- /dev/null +++ b/src/content/docs/converter/shieldhit.md @@ -0,0 +1,183 @@ +--- +title: "SHIELD-HIT12A" +description: Converter internals for the SHIELD-HIT12A simulation engine. +--- + +SHIELD-HIT12A is the **primary** and most complete engine in the YAPTIDE converter. It produces four fixed-width text files that control particle transport simulation. 

## Output Files

| File | Purpose | Config Class |
|---|---|---|
| `beam.dat` | Beam parameters | `BeamConfig` |
| `mat.dat` | Material definitions | `GeoMatConfig` (shared) |
| `geo.dat` | Geometry description | `GeoMatConfig` (shared) |
| `detect.dat` | Scoring detectors | `DetectConfig` |

## File Formats

### beam.dat

Defines the particle source:

```
RNDSEED       89736501    0
JPART0        2
TMAX0         150.00000   0.00
NSTAT         10000       0
STRAGG        2
MSCAT         2
NUCRE         1
```

**Key parameters:**

| Keyword | Description |
|---|---|
| `RNDSEED` | Random number seed |
| `JPART0` | Particle type (e.g. 2 = proton, 25 = heavy ion) |
| `TMAX0` | Beam energy in MeV (or MeV/nucl for ions) |
| `NSTAT` | Number of primary particles |
| `STRAGG` | Energy straggling model |
| `MSCAT` | Multiple scattering model |
| `NUCRE` | Nuclear reactions toggle |

The converter handles the **energy unit conversion** between MeV (editor) and MeV/nucleon (SHIELD-HIT12A) automatically via `convert_beam_energy()`.

### geo.dat

Uses a zone-based CSG geometry format:

```
    0    0          Proton pencil beam in water
    0    1          geometry for simple simulations
  RCC    1       0.000       0.000       0.000       0.000       0.000      20.000
                 5.000
  RCC    2       0.000       0.000      -1.000       0.000       0.000      22.000
                 6.000
  END
  001          +1
  002          +2   -1
  END
    1    1    1
    2 1000
```

The file has **three sections**:

1. **Figures** — geometric primitives (`RCC` = cylinder, `RPP` = box, `SPH` = sphere)
2. **Zones** — boolean CSG expressions using figure IDs (`+N` = inside, `-N` = outside)
3. **Medium assignments** — zone-to-material mapping

The converter auto-generates a **black-hole boundary zone** that surrounds the entire geometry, filled with vacuum (material 1000).

### mat.dat

Defines materials using ICRU numbers or custom element compositions:

```
MEDIUM 1
ICRU 276
END
```

For custom materials, the converter writes element-by-element composition with density.

### detect.dat

Defines scoring meshes and quantities:

```
Geometry Cyl 0.000 0.000 0.000 CYL
  200.000 200.000 0.000
  1.000 1.000 400.000
  1 1 400
  DOSE
  APTS Detector0
```

**Detector types supported:**

| Type | Keyword | Description |
|---|---|---|
| Mesh | `MSH` | Cartesian grid |
| Cylinder | `CYL` | Cylindrical mesh (r, θ, z) |
| Zone | `ZONE` | Score in a named geometric zone |

**Scored quantities:**

| Keyword | Quantity |
|---|---|
| `DOSE` | Absorbed dose |
| `FLUENCE` | Particle fluence |
| `LETFLU` | Fluence-weighted LET |
| `DLETFLU` | Dose-weighted LET (via fluence) |
| `TLETFLU` | Track-averaged LET |
| `SPC` | Energy spectrum |
| `APTS` | Average point of the track scored |

## Parser Internals

### Module Structure

```
shieldhit/
├── __init__.py
├── parser.py # ShieldhitParser orchestration
├── beam.py # BeamConfig dataclass + rendering
├── geo.py # GeoMatConfig — figures, zones, materials
├── detect.py # DetectConfig — detectors + scoring
└── ...
```

### Parsing Flow

```python
class ShieldhitParser(Parser):
    def parse_configs(self, json_data):
        self.beam_config = BeamConfig()
        self.beam_config.parse(json_data)

        self.geo_mat_config = GeoMatConfig()
        self.geo_mat_config.parse(json_data)

        self.detect_config = DetectConfig()
        self.detect_config.parse(json_data)
```

Each config class follows the same pattern:

1. Extract relevant JSON keys
2. Validate required fields
3. Convert editor units to SHIELD-HIT12A units
4. Render the parsed values into fixed-width formatted strings

### Fixed-Width Formatting

SHIELD-HIT12A input files use **Fortran-style fixed-width columns**. The converter uses `format_float()` from `converter/common.py`:

```python
from converter.common import format_float

format_float(5.0, 10)    # " 5.00000" — fixed-width, right-aligned
format_float(0.001, 10)  # " 0.00100"
```

This ensures the column alignment expected by the SHIELD-HIT12A parser.

### Zone Construction

The converter transforms the editor's figure + zone model into SHIELD-HIT12A's CSG syntax:

1. Each figure becomes a named primitive (RCC, RPP, SPH)
2. Each zone becomes a boolean expression: `+N` (inside figure N), `-N` (outside figure N)
3. A surrounding black-hole zone is auto-generated using `SolidFigure.expand()`
4. Zone numbering and material assignment are serialized in the final section

## Common Issues

| Problem | Cause | Fix |
|---|---|---|
| Overlapping zones | Zone definitions in JSON share the same figure without subtraction | Ensure zones correctly subtract each other in the editor |
| Missing black-hole boundary | World zone not generated | Check that `expand()` covers all figures |
| Wrong energy units | MeV vs MeV/nucl mismatch | `convert_beam_energy()` handles this; verify `heavy_ion_a` field |
| Truncated numbers | Value too large for column width | Adjust precision in `format_float()` |

diff --git a/src/content/docs/converter/testing.md b/src/content/docs/converter/testing.md
new file mode 100644
index 0000000..91c5f19
--- /dev/null
+++ b/src/content/docs/converter/testing.md
@@ -0,0 +1,212 @@
---
title: Converter Testing
description: Test strategy and patterns for the YAPTIDE converter.
---

The converter uses a **golden-file** testing strategy: parser output is compared character-by-character against known-good reference files. This catches any unintended formatting or content changes.

## Running Tests

```bash
cd converter
poetry install
poetry run pytest -v
```

Run tests for a specific engine:

```bash
poetry run pytest tests/shieldhit/ -v
poetry run pytest tests/fluka/ -v
poetry run pytest tests/geant4/ -v
```

## Test Structure

```
tests/
├── conftest.py # Shared fixtures
├── test_solid_figures.py # Unit tests for geometry primitives
├── test_test.py # Smoke test
├── shieldhit/
│   ├── __init__.py
│   ├── test_shieldhit.py # ShieldhitParser golden-file tests
│   └── test_data/
│       ├── project.json # Input project JSON
│       ├── beam.dat # Expected beam.dat output
│       ├── geo.dat # Expected geo.dat output
│       ├── mat.dat # Expected mat.dat output
│       └── detect.dat # Expected detect.dat output
├── fluka/
│   ├── __init__.py
│   ├── test_fluka.py
│   └── test_data/
│       ├── project.json
│       └── fl_sim.inp
├── geant4/
│   ├── __init__.py
│   ├── test_geant4.py
│   └── test_data/
│       ├── project.json
│       ├── geometry.gdml
│       └── run.mac
└── topas/
    └── ...
```

## Golden-File Pattern

The core testing approach:

```python
import json
from pathlib import Path
from converter.api import get_parser_from_str

TEST_DIR = Path(__file__).parent / "test_data"


def test_beam_dat():
    """Parsed beam.dat matches the golden reference."""
    with open(TEST_DIR / "project.json") as f:
        project = json.load(f)

    parser = get_parser_from_str("shieldhit")
    parser.parse_configs(project)
    configs = parser.get_configs_json()

    expected = (TEST_DIR / "beam.dat").read_text()
    assert configs["beam.dat"] == expected
```

### How It Works

1. A `project.json` file captures a real editor project export.
2. The test runs the parser on that JSON.
3. The parser's output is compared **exactly** against pre-saved reference files.
4. Any character-level difference fails the test.

### Updating Golden Files

When you **intentionally** change the converter output format:

1. Run the parser manually to generate new output.
2. Inspect the diff carefully — every change should be intentional.
3. Replace the reference files in `test_data/`.
4. Commit the updated golden files alongside the code change.

```bash
# Generate new output for inspection
cd converter
poetry run python -c "
import json
from converter.api import get_parser_from_str

with open('tests/shieldhit/test_data/project.json') as f:
    data = json.load(f)

parser = get_parser_from_str('shieldhit')
parser.parse_configs(data)
for name, content in parser.get_configs_json().items():
    print(f'=== {name} ===')
    print(content)
"
```

## Unit Tests

### Solid Figures

`test_solid_figures.py` tests geometry primitives independently:

```python
from converter.solid_figures import BoxFigure, CylinderFigure, SphereFigure


def test_box_expand():
    """Box expand() increases dimensions by margin."""
    box = BoxFigure(position=[0, 0, 0], rotation=[0, 0, 0],
                    x_edge_length=10, y_edge_length=10, z_edge_length=20)
    expanded = box.expand(margin=5.0)
    assert expanded.x_edge_length == 20.0
    assert expanded.z_edge_length == 30.0


def test_cylinder_expand():
    """Cylinder expand() increases radius and height."""
    cyl = CylinderFigure(position=[0, 0, 0], rotation=[0, 0, 0],
                         radius=5, height=20)
    expanded = cyl.expand(margin=3.0)
    assert expanded.radius == 8.0
    assert expanded.height == 26.0
```

These tests verify that `expand()` correctly generates figures for the black-hole boundary.

### Utility Functions

```python
from converter.common import format_float, rotate


def test_format_float():
    """format_float produces correctly padded output."""
    assert format_float(5.0, 10) == " 5.00000"


def test_rotate():
    """3D rotation applies Tait-Bryan angles correctly."""
    result = rotate([1, 0, 0], [0, 0, 90])
    assert abs(result[0]) < 1e-10
    assert abs(result[1] - 1.0) < 1e-10
```

## Fixtures

`conftest.py` provides shared test utilities:

```python
import json
import pytest
from pathlib import Path


@pytest.fixture
def sample_project():
    """Load the standard test project JSON."""
    path = Path(__file__).parent / "test_data" / "project.json"
    with open(path) as f:
        return json.load(f)
```

## Writing New Tests

### For a New Scoring Quantity

1. Add the scoring quantity to an existing `project.json` or create a new test project.
2. Run the parser and inspect the output.
3. Save the correct output as a new golden file.
4. Write a test comparing parser output against the golden file.

### For a Bug Fix

1. Create a minimal `project.json` that reproduces the bug.
2. Run the parser — verify it produces wrong output.
3. Fix the bug.
4. Save the now-correct output as the golden file.
5. Write the test (see the sketch below). The test documents the bug and prevents regression.
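
A hedged sketch of step 5, reusing the golden-file pattern from above; the bug, the project file, and the golden file names here are hypothetical:

```python
import json
from pathlib import Path

from converter.api import get_parser_from_str

TEST_DIR = Path(__file__).parent / "test_data"


def test_world_zone_expansion_regression():
    """Hypothetical regression test: the black-hole zone must enclose all figures."""
    with open(TEST_DIR / "project_large_figure.json") as f:  # minimal reproducer
        project = json.load(f)

    parser = get_parser_from_str("shieldhit")
    parser.parse_configs(project)

    # Golden file saved after the fix; a regression changes geo.dat exactly here
    expected = (TEST_DIR / "geo_large_figure.dat").read_text()
    assert parser.get_configs_json()["geo.dat"] == expected
```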

## Pre-Commit Hooks

The converter uses pre-commit for code quality:

```bash
cd converter
poetry run pre-commit install
poetry run pre-commit run --all-files
```

Hooks include:

- **YAPF** — Python code formatting
- **isort** — import ordering
- Standard file checks (trailing whitespace, end-of-file newline)

diff --git a/src/content/docs/docker-setup/docker-celery.mdx b/src/content/docs/docker-setup/docker-celery.mdx
new file mode 100644
index 0000000..9ee28f5
--- /dev/null
+++ b/src/content/docs/docker-setup/docker-celery.mdx
@@ -0,0 +1,157 @@
---
title: Docker Setup — Celery Workers
description: Run the full YAPTIDE backend with Celery workers in Docker containers for integration testing and development.
---

import { Tabs, TabItem, Aside } from "@astrojs/starlight/components";

This guide covers the **development** Docker setup — you get a production-like environment without manually managing Redis, PostgreSQL, and worker processes.

## When to use this setup

| Scenario | Recommended setup |
| ------------------------------------------ | -------------------------------------------------------------------------- |
| Frontend / Geant4 in-browser only | [Local Frontend Demo](/for_developers/local-setup/local-frontend-demo/) |
| Full stack with SHIELD-HIT12A or FLUKA | **This page** |
| Full stack with SLURM cluster | [Local SLURM](/for_developers/local-setup/local-slurm/) |

:::tip
Prefer a local setup? See [Celery Workers — Local](/for_developers/local-setup/local-celery/) for the non-containerized version.
:::

## Prerequisites

- **Docker Engine 20.10+**
- **Git**

## Step 1: Clone and start

```bash
git clone https://github.com/yaptide/yaptide.git
cd yaptide
```

Two startup options are available:

**Standard mode** — builds and starts all containers:

<Tabs>
  <TabItem label="Linux / macOS">

```bash
scripts/start_with_docker.sh
```

  </TabItem>
  <TabItem label="Windows">

```powershell
scripts/start_with_docker.ps1
```

  </TabItem>
</Tabs>

**Develop mode** — same as standard, but mounts your local source code into the containers so code changes are reflected without rebuilding:

<Tabs>
  <TabItem label="Linux / macOS">

```bash
scripts/start_with_docker_develop.sh
```

  </TabItem>
  <TabItem label="Windows">

```powershell
scripts/start_with_docker_develop.ps1
```

  </TabItem>
</Tabs>

The script detects your Docker version and uses the fastest available method automatically.

## Step 2: Create a user

```bash
docker exec -w /usr/local/app/ yaptide_flask \
  ./yaptide/admin/db_manage.py add-user admin --password password
```

## Step 3: Start the frontend

The backend API is available at **http://localhost:5000** (via NGINX). Clone the frontend repo in a separate terminal:

```bash
git clone https://github.com/yaptide/ui.git
cd ui
git submodule update --init --recursive
```

`git submodule update` pulls the [converter](https://github.com/yaptide/converter) submodule.

Create a `.env` file in the `ui/` directory:

```bash title="ui/.env"
REACT_APP_BACKEND_URL=http://localhost:5000
```

Then run the following command from the `ui/` directory:

```bash
docker compose up
```

Open **http://localhost** and log in with the credentials you created (username: `admin`, password: `password`). The setup is now complete.

:::caution
With a Chromium-based browser, use `https://localhost:8443` as the backend URL instead of `http://localhost:5000` to avoid cookie issues with browser security policies. 
+```bash title="ui/.env" + REACT_APP_BACKEND_URL=https://localhost:8443 +``` +::: + +## Stopping +```bash +docker compose down +``` + +To also delete all data (PostgreSQL volume, simulator cache): +```bash +docker compose down -v +``` + +## Managing the database + +View all management commands: +```bash +docker exec -w /usr/local/app/ yaptide_flask \ + ./yaptide/admin/db_manage.py --help +``` + +### Connecting to PostgreSQL directly + +The PostgreSQL container exposes port 5432. Connect with any PostgreSQL client: +```bash +psql -h localhost -U yaptide_user -d yaptide_db +# password: yaptide_password (default) +``` + +Or use a GUI tool like pgAdmin. + +## Container overview + +| Container | Port | Purpose | +| --------------------------- | ------------------------- | ---------------- | +| `yaptide_nginx` | 5000 (HTTP), 8443 (HTTPS) | Reverse proxy | +| `yaptide_flask` | 6000 (internal) | Flask API | +| `yaptide_postgresql` | 5432 | Database | +| `yaptide_redis` | 6379 (internal) | Celery broker | +| `yaptide_simulation_worker` | — | Runs simulations | +| `yaptide_helper_worker` | — | Post-processing | + +Check container health: +```bash +docker compose ps +``` + +View logs: +```bash +docker compose logs -f yaptide_flask +docker compose logs -f yaptide_simulation_worker +``` \ No newline at end of file diff --git a/src/content/docs/docker-setup/docker-frontend-demo.mdx b/src/content/docs/docker-setup/docker-frontend-demo.mdx new file mode 100644 index 0000000..9166739 --- /dev/null +++ b/src/content/docs/docker-setup/docker-frontend-demo.mdx @@ -0,0 +1,45 @@ +--- +title: Frontend Demo — Docker (Geant4) +description: Production Docker instructions for the frontend-only/demo build of YAPTIDE UI. +--- + +In demo mode, YAPTIDE runs entirely in the browser. +Geant4 is compiled to WebAssembly and executes locally — no backend, no login, no internet connection needed after the initial load. + +This is the setup deployed at [yaptide.github.io](https://yaptide.github.io). The Docker image simply serves the static production build — no backend is included. + +## When to use this setup + +| Scenario | Recommended setup | +| ------------------------------------------ | -------------------------------------------------------------------------- | +| Frontend / Geant4 in-browser only | **This page** | +| Full stack with SHIELD-HIT12A or FLUKA | [Docker Celery](/for_developers/docker-setup/docker-celery/) | +| Full stack with SLURM cluster | [Docker SLURM](/for_developers/docker-setup/docker-slurm/) | + +:::tip + Prefer a local setup? See [Frontend Demo — Local](/for_developers/local-setup/local-frontend-demo/) for the non-containerized version. +::: + +## Prerequisites + +- **Docker Engine 20.10+** +- **Git** + +## Setup + +```bash +git clone https://github.com/yaptide/ui.git +cd ui +git submodule update --init --recursive +docker compose up +``` + +`git submodule update` pulls the [converter](https://github.com/yaptide/converter) submodule. + +The UI is then served at **http://localhost**. + +## Stopping + +```bash +docker compose down +``` diff --git a/src/content/docs/docker-setup/docker-slurm.mdx b/src/content/docs/docker-setup/docker-slurm.mdx new file mode 100644 index 0000000..3f26905 --- /dev/null +++ b/src/content/docs/docker-setup/docker-slurm.mdx @@ -0,0 +1,24 @@ +--- +title: Docker Setup — SLURM +description: Run the full YAPTIDE backend in Docker with Keycloak authentication for SLURM integration. 
+--- + +This guide covers running the full YAPTIDE backend in Docker with Keycloak enabled — useful for integration testing and deployments that require SLURM / PLGrid. + +This is the setup deployed at [yaptide.c3.plgrid.pl](https://yaptide.c3.plgrid.pl). + +## When to use this setup + +| Scenario | Recommended setup | +| ------------------------------------------ | -------------------------------------------------------------------------- | +| Frontend / Geant4 in-browser only | [Local Frontend Demo](/for_developers/local-setup/local-frontend-demo/) | +| Full stack with SHIELD-HIT12A or FLUKA | [Local Celery](/for_developers/local-setup/local-celery/) | +| Full stack with SLURM cluster | **This page** | + +:::tip + Prefer a local setup? See [Local — SLURM](/for_developers/local-setup/local-slurm/) for the non-containerized version. +::: + +:::caution[Work in Progress] + This page is not yet complete — we are still working on the SLURM integration and will update this guide once it's ready. In the meantime, see the [Docker — Celery](/for_developers/docker-setup/docker-celery/) guide for the Celery setup. +::: diff --git a/src/content/docs/frontend/3d-editor.md b/src/content/docs/frontend/3d-editor.md new file mode 100644 index 0000000..27ebd4e --- /dev/null +++ b/src/content/docs/frontend/3d-editor.md @@ -0,0 +1,178 @@ +--- +title: 3D Editor +description: Architecture of the Three.js-based 3D simulation editor. +--- + +The 3D editor is the core of the YAPTIDE frontend. It provides a visual environment for building particle transport simulation geometries, defining beams, and configuring scoring. + +## YaptideEditor + +The editor is implemented in `ThreeEditor/js/YaptideEditor.js` — a ~780-line imperative class using the prototype pattern (forked from the Three.js editor project). It is exposed globally as `window.YAPTIDE_EDITOR` for debugging. + +The `Store` context holds the singleton `YaptideEditor` instance, accessible via `useStore()`. + +## Object Managers + +The editor uses a manager pattern to organize different simulation components: + +| Manager | What it Manages | Location | +|---|---|---| +| `FigureManager` | Geometric primitives | `Simulation/Figures/` | +| `ZoneManager` | Boolean CSG zones | `Simulation/Zones/` | +| `MaterialManager` | Material definitions | `Simulation/Materials/` | +| `DetectorManager` | Scoring detectors | `Simulation/Detectors/` | +| `ScoringManager` | Outputs, quantities, filters | `Simulation/Scoring/` | +| `SpecialComponentManager` | CT cubes, beam modulators | `Simulation/Special/` | +| `Beam` | Particle source | `Simulation/Physics/Beam.ts` | +| `Physics` | Physics model settings | `Simulation/Physics/` | + +### FigureManager + +Manages 3D solid primitives: + +| Figure Type | Three.js Geometry | Parameters | +|---|---|---| +| `BoxFigure` | `BoxGeometry` | `xLength`, `yLength`, `zLength` | +| `CylinderFigure` | `HollowCylinderGeometry` | `radius`, `height` (+ optional inner radius) | +| `SphereFigure` | `SphereGeometry` | `radius` | + +Each figure has position, rotation, and a unique UUID. Figures are rendered in the 3D viewport and referenced by zones. + +### ZoneManager + +Zones define regions via **Constructive Solid Geometry** (CSG): + +- **Union** — combine figures to form a larger region +- **Intersection** — take the overlap of figures +- **Subtraction** — remove one figure's volume from another + +Each zone is assigned a material. The `worldZone` is a special bounding zone that defines the simulation boundary. 

### DetectorManager

Four detector types for scoring:

| Type | Geometry | Use Case |
|---|---|---|
| `Cylinder` | Cylindrical mesh | Depth-dose curves, radial profiles |
| `Mesh` | Rectangular grid | 2D/3D dose maps |
| `Zone` | Matches a zone's geometry | Score within a specific region |
| `All` | Entire simulation volume | Global scoring |

## Viewport

`ThreeEditor/js/viewport/ViewportManager.js` manages the 3D rendering:

### 4-Way Split View

The viewport supports a **quad-split** layout:

```
┌──────────────┬──────────────┐
│ Top (XY)     │ Top (XZ)     │
│              │              │
├──────────────┼──────────────┤
│ Side (YZ)    │ Perspective  │
│              │ (3D)         │
└──────────────┴──────────────┘
```

Each pane can be individually resized. The split is implemented using `split-grid`.

### Controls

- **Orbit controls** — rotate, pan, zoom the perspective view
- **Transform gizmos** — translate, rotate, and scale selected objects
- **Selection box** — click-to-select objects in any view
- **Grid helpers** — reference grid for spatial orientation
- **Camera helpers** — beam direction indicator

### CSG Clipping

The viewport supports **CSG clipped views** for cross-section visualization. This lets users see inside complex zone configurations.

## Sidebar

`ThreeEditor/components/Sidebar/EditorSidebar.tsx` provides a 3-tab sidebar:

### Geometry Tab

- **Figures** — tree view of all primitives, with add/remove/edit
- **Zones** — tree view of boolean zone definitions
- **Detectors** — tree view of scoring detectors
- **Special components** — modulators, CT cubes

### Scoring Tab

- **Outputs** — each output links a detector to scored quantities
- **Quantities** — dose, fluence, LET, etc.
- **Filters** — particle filters for conditional scoring

### Settings Tab

- **Beam** — particle type, energy, position, direction, divergence
- **Physics** — energy loss, nuclear reactions, scattering models

## Context Switching

`EditorContext.ts` manages which sidebar context is active:

| Context | Sidebar Content | Viewport Behavior |
|---|---|---|
| `geometry` | Figures, zones, detectors | Full 3D editing with gizmos |
| `scoring` | Outputs, quantities, filters | Detector geometry visualization |
| `settings` | Beam, physics | Beam direction visualization |

## Signals

The editor uses the `signals` library for event-driven communication between the imperative editor core and the React UI layer.

### Key Signals

| Signal | Triggered When |
|---|---|
| `objectAdded` | A new object is added to the scene |
| `objectRemoved` | An object is removed |
| `objectChanged` | An object's properties change |
| `objectSelected` | An object is clicked/selected |
| `zoneGeometryChanged` | A zone's CSG definition changes |
| `scoringQuantityChanged` | A scoring quantity is added/modified |
| `editorCleared` | The scene is completely cleared |
| `sceneGraphChanged` | The scene hierarchy changes |
| `historyChanged` | The undo/redo stack changes |

### React Bridge

Use the `useSignal` hook to subscribe to signals in React components:

```typescript
import { useState } from 'react';

import { useSignal } from '../hooks/useSignal';

function MyComponent() {
  const [selectedObject, setSelectedObject] = useState(null);

  useSignal('objectSelected', (object) => {
    setSelectedObject(object);
  });

  return <div>{selectedObject?.name}</div>
; +} +``` + +## Serialization + +The editor serializes its entire state to a JSON format. This JSON: + +- Is the input to the converter (JSON → simulator input files) +- Is auto-saved to `localStorage` on every change +- Can be exported/imported as `.json` files +- Is sent to the backend when submitting a simulation + +The serialization captures: +- All figures with geometry, position, rotation +- Zone definitions (boolean operations + material assignments) +- Beam configuration +- Detector geometries +- Scoring outputs, quantities, and filters +- Physics settings +- Special components diff --git a/src/content/docs/frontend/adding-commands.md b/src/content/docs/frontend/adding-commands.md new file mode 100644 index 0000000..bacf753 --- /dev/null +++ b/src/content/docs/frontend/adding-commands.md @@ -0,0 +1,241 @@ +--- +title: Adding Commands +description: How to implement new editor commands with undo/redo support. +--- + +All editor mutations (adding figures, changing materials, moving objects) go through the **Command pattern** to enable undo/redo. This page explains how to create new commands. + +## Command Pattern Overview + +``` +User action + │ + ▼ +new SomeCommand(editor, params) + │ + ▼ +editor.execute(command) + │ + ├── command.execute() ← applies the change + ├── push to undo stack + └── signal: historyChanged +``` + +When the user hits **Ctrl+Z**: +``` +editor.undo() + │ + ▼ +command.undo() ← reverses the change + │ + ├── pop from undo stack + ├── push to redo stack + └── signal: historyChanged +``` + +## Command Base Class + +Every command extends the base `Command` class: + +```typescript +class Command { + id: number; + name: string; + updatable: boolean; + + constructor(editor: YaptideEditor) { + this.id = -1; + this.name = ''; + this.updatable = false; + } + + execute(): void { + // Override: apply the change + } + + undo(): void { + // Override: reverse the change + } + + toJSON(): object { + // Override: serialize for history persistence + return { type: this.constructor.name }; + } + + fromJSON(json: object): void { + // Override: deserialize from history + } +} +``` + +## Creating a New Command + +### Step 1: Define the Command Class + +```typescript +// ThreeEditor/js/commands/SetMaterialCommand.ts +import { Command } from './Command'; + +class SetMaterialCommand extends Command { + private object: SimulationZone; + private oldMaterialUuid: string; + private newMaterialUuid: string; + + constructor( + editor: YaptideEditor, + object: SimulationZone, + newMaterialUuid: string + ) { + super(editor); + this.name = 'Set Material'; + this.object = object; + this.oldMaterialUuid = object.materialUuid; + this.newMaterialUuid = newMaterialUuid; + } + + execute(): void { + this.object.materialUuid = this.newMaterialUuid; + this.editor.signals.objectChanged.dispatch(this.object); + } + + undo(): void { + this.object.materialUuid = this.oldMaterialUuid; + this.editor.signals.objectChanged.dispatch(this.object); + } + + toJSON(): object { + return { + type: 'SetMaterialCommand', + objectUuid: this.object.uuid, + oldMaterialUuid: this.oldMaterialUuid, + newMaterialUuid: this.newMaterialUuid, + }; + } + + fromJSON(json: any): void { + this.object = this.editor.objectByUuid(json.objectUuid); + this.oldMaterialUuid = json.oldMaterialUuid; + this.newMaterialUuid = json.newMaterialUuid; + } +} +``` + +### Step 2: Execute the Command + +In the UI component that triggers the change: + +```typescript +const handleMaterialChange = (zone: SimulationZone, materialUuid: string) => { + 
editor.execute( + new SetMaterialCommand(editor, zone, materialUuid) + ); +}; +``` + +### Step 3: Dispatch Signals + +Always dispatch the appropriate signal in `execute()` and `undo()` so the React UI updates: + +| Signal | When to dispatch | +|---|---| +| `objectAdded` | New object created | +| `objectRemoved` | Object deleted | +| `objectChanged` | Object properties changed | +| `zoneGeometryChanged` | Zone boolean operations changed | +| `scoringQuantityChanged` | Scoring configuration changed | +| `sceneGraphChanged` | Scene hierarchy changed | + +## Updatable Commands + +Some commands should be **merged** when they occur rapidly in succession (e.g., dragging an object). Set `updatable = true`: + +```typescript +class MoveObjectCommand extends Command { + constructor(editor, object, newPosition, oldPosition) { + super(editor); + this.name = 'Move Object'; + this.updatable = true; // ← merge rapid moves + // ... + } + + update(command: MoveObjectCommand): void { + // Called instead of creating a new history entry + this.newPosition = command.newPosition; + } +} +``` + +When `editor.execute()` sees a command with `updatable = true` and the previous command has the same type and `id`, it calls `update()` instead of pushing a new entry. This prevents clogging the undo stack with every mouse move event. + +### Existing Updatable Commands + +| Command | Why Updatable | +|---|---| +| `SetPositionCommand` | Dragging objects generates many moves | +| `SetRotationCommand` | Continuous rotation via gizmo | +| `SetScaleCommand` | Continuous scaling via gizmo | +| `SetValueCommand` | Slider/input field rapid changes | + +## Writing Tests for Commands + +### Test Pattern + +```typescript +// __tests__/commands/SetMaterialCommand.test.ts +describe('SetMaterialCommand', () => { + let editor: YaptideEditor; + + beforeEach(() => { + editor = new YaptideEditor(); + // Set up scene with figures, zones, materials + }); + + test('execute changes material', () => { + const zone = editor.zoneManager.zones[0]; + const newMaterial = 'mat-002'; + + editor.execute(new SetMaterialCommand(editor, zone, newMaterial)); + + expect(zone.materialUuid).toBe(newMaterial); + }); + + test('undo restores original material', () => { + const zone = editor.zoneManager.zones[0]; + const originalMaterial = zone.materialUuid; + const newMaterial = 'mat-002'; + + editor.execute(new SetMaterialCommand(editor, zone, newMaterial)); + editor.undo(); + + expect(zone.materialUuid).toBe(originalMaterial); + }); + + test('redo re-applies the change', () => { + const zone = editor.zoneManager.zones[0]; + const newMaterial = 'mat-002'; + + editor.execute(new SetMaterialCommand(editor, zone, newMaterial)); + editor.undo(); + editor.redo(); + + expect(zone.materialUuid).toBe(newMaterial); + }); +}); +``` + +### Running Tests + +```bash +npm test -- --testPathPattern=commands +``` + +## Checklist for New Commands + +1. Extend `Command` base class +2. Store both **old** and **new** values in the constructor +3. Implement `execute()` — apply the change + dispatch signals +4. Implement `undo()` — reverse the change + dispatch signals +5. Implement `toJSON()` / `fromJSON()` for history serialization +6. If applicable, set `updatable = true` and implement `update()` +7. Write unit tests covering execute, undo, redo, and serialization +8. 
Use `editor.execute(new YourCommand(...))` — never mutate state directly diff --git a/src/content/docs/frontend/auth-flows.md b/src/content/docs/frontend/auth-flows.md new file mode 100644 index 0000000..342f006 --- /dev/null +++ b/src/content/docs/frontend/auth-flows.md @@ -0,0 +1,220 @@ +--- +title: Auth Flows +description: Authentication implementation in the YAPTIDE frontend. +--- + +The frontend supports two authentication modes and a demo mode that bypasses auth entirely. + +## Mode Selection + +| Mode | Activated By | Backend Required | +|---|---|---| +| **Standard** (username/password) | Default | Yes | +| **Keycloak SSO** (PLGrid) | `REACT_APP_ALT_AUTH=plg` | Yes + Keycloak | +| **Demo** (no auth) | `REACT_APP_TARGET=demo` | No | + +## Standard Authentication + +### Login Flow + +```typescript +// AuthService.tsx — simplified +const login = async (username: string, password: string) => { + const response = await ky.post('auth/login', { + json: { username, password }, + credentials: 'include' // sends/receives httpOnly cookies + }); + + const { accessExp } = await response.json(); + + // Store user info in localStorage for persistence across refreshes + localStorage.setItem('user', JSON.stringify({ username })); + + // Start auto-refresh timer + startRefreshTimer(accessExp); +}; +``` + +### Auto-Refresh + +The UI auto-refreshes the access token at **1/3 of its lifetime**: + +```typescript +const startRefreshTimer = (accessExp: number) => { + const now = Date.now() / 1000; + const ttl = accessExp - now; + const refreshIn = (ttl / 3) * 1000; // milliseconds + + setTimeout(async () => { + const response = await ky.get('auth/refresh', { + credentials: 'include' + }); + const { accessExp: newExp } = await response.json(); + startRefreshTimer(newExp); + }, refreshIn); +}; +``` + +This creates a self-sustaining refresh loop. If the refresh fails (e.g., refresh token expired), the user is logged out. 
+ +### Logout + +```typescript +const logout = async () => { + await ky.delete('auth/logout', { credentials: 'include' }); + localStorage.removeItem('user'); + // Reset the UI to the login tab +}; +``` + +### Session Persistence + +On page load, the UI checks `localStorage` for a saved user and attempts a token refresh: + +```typescript +// On app load +const savedUser = localStorage.getItem('user'); +if (savedUser) { + try { + await refreshToken(); + // Session restored + } catch { + localStorage.removeItem('user'); + // Session expired, show login + } +} +``` + +## Keycloak SSO + +### Configuration + +`KeycloakAuthService.tsx` initializes the Keycloak JS SDK: + +```typescript +const keycloak = new Keycloak({ + url: config.keycloakBaseUrl, // REACT_APP_KEYCLOAK_BASE_URL + realm: config.keycloakRealm, // REACT_APP_KEYCLOAK_REALM + clientId: config.keycloakClientId // REACT_APP_KEYCLOAK_CLIENT_ID +}); +``` + +### Init Options + +```typescript +keycloak.init({ + onLoad: 'check-sso', + pkceMethod: 'S256', + silentCheckSsoRedirectUri: `${window.location.origin}/silent-check-sso.html`, + checkLoginIframe: false +}); +``` + +- **`check-sso`** — checks for existing session without forcing login +- **`pkceMethod: 'S256'`** — PKCE with SHA-256 challenge (prevents authorization code interception) +- **`silentCheckSsoRedirectUri`** — invisible iframe for session checks without page reload + +### Token Exchange + +After Keycloak authentication, the frontend exchanges the Keycloak token with the YAPTIDE backend: + +```typescript +const exchangeKeycloakToken = async () => { + const response = await ky.post('auth/keycloak', { + headers: { + Authorization: `Bearer ${keycloak.token}` + }, + credentials: 'include' + }); + + const { accessExp } = await response.json(); + startRefreshTimer(accessExp); +}; +``` + +The backend validates the Keycloak token, creates/updates the user, and issues local JWT cookies. + +### Auto-Refresh + +Keycloak tokens are refreshed independently of the YAPTIDE tokens: + +```typescript +// Refresh Keycloak token when < 5 minutes remaining +keycloak.onTokenExpired = () => { + keycloak.updateToken(300).then((refreshed) => { + if (refreshed) { + // Re-exchange with backend + exchangeKeycloakToken(); + } + }); +}; +``` + +### PLGrid Service Check + +The UI checks the Keycloak token for PLGrid service claims: + +```typescript +const hasYaptideAccess = keycloak.tokenParsed?.PLG_YAPTIDE_ACCESS === true; + +if (!hasYaptideAccess) { + // Show dialog: "You need to enroll in the YAPTIDE PLGrid service" + showServiceRejectionDialog(); +} +``` + +## Demo Mode + +When `REACT_APP_TARGET=demo`: + +```typescript +// ConfigService.tsx +const demoMode = process.env.REACT_APP_TARGET === 'demo'; + +// AuthService.tsx +if (config.demoMode) { + // Skip all auth logic + // User is "anonymous" + // No backend communication + return; +} +``` + +In demo mode: +- The login tab is hidden +- No API calls are made +- Only Geant4 Wasm simulations work +- No results are persisted + +## Server Reachability + +`AuthService.tsx` includes a **reachability poller** that periodically checks if the backend is accessible: + +```typescript +const checkServerReachable = async () => { + try { + await ky.get('auth/status', { credentials: 'include', timeout: 5000 }); + setServerReachable(true); + } catch { + setServerReachable(false); + } +}; +``` + +If the server becomes unreachable, the UI shows a notification and disables simulation submission (remote simulations only — Geant4 Wasm continues to work). 
+ +## The `authKy` Client + +`AuthService` exports an `authKy` HTTP client — a pre-configured `ky` instance with: + +```typescript +const authKy = ky.create({ + prefixUrl: config.backendUrl, + credentials: 'include', + hooks: { + afterResponse: [snakeToCamelTransformer] + } +}); +``` + +All backend API calls in the frontend use `authKy` to ensure cookies are sent and responses are camelCased. diff --git a/src/content/docs/frontend/geant4-wasm.md b/src/content/docs/frontend/geant4-wasm.md new file mode 100644 index 0000000..b24f6fe --- /dev/null +++ b/src/content/docs/frontend/geant4-wasm.md @@ -0,0 +1,165 @@ +--- +title: Geant4 WebAssembly +description: Running Geant4 simulations in the browser via WebAssembly. +--- + +YAPTIDE supports running **Geant4 simulations entirely in the browser** using a WebAssembly build of the Geant4 toolkit. This enables simulation execution without any backend infrastructure. + +## Architecture + +``` +┌───────────────────────────────────────────────────┐ +│ Browser │ +│ │ +│ ┌──────────┐ ┌────────────┐ ┌────────────┐ │ +│ │ Editor │ → │ Pyodide │ → │ Geant4 │ │ +│ │ JSON │ │ Converter │ │ Wasm Worker│ │ +│ │ │ │ │ │ │ │ +│ │ │ │ .gdml + │ │ Executes │ │ +│ │ │ │ .mac files │ │ simulation │ │ +│ └──────────┘ └────────────┘ └─────┬──────┘ │ +│ │ │ +│ ┌─────▼──────┐ │ +│ │ Results │ │ +│ │ Parser │ │ +│ │ │ │ +│ │ JSRoot │ │ +│ │ Plots │ │ +│ └────────────┘ │ +└───────────────────────────────────────────────────┘ +``` + +## Execution Flow + +### 1. Input Generation + +The Pyodide converter generates Geant4-native input files from the editor JSON: + +- **`geometry.gdml`** — GDML XML describing the simulation geometry (solids, volumes, placements) +- **`run.mac`** — Geant4 macro with particle source commands (`/gps/...`), scoring mesh definitions (`/score/...`), and run commands (`/run/beamOn`) + +### 2. Geant4 Wasm Worker + +`Geant4Worker/` contains the Web Worker that loads and runs the Geant4 Wasm binary: + +```typescript +// Simplified worker flow +self.onmessage = async (event) => { + const { gdml, macro } = event.data; + + // Load Geant4 Wasm + const geant4 = await loadGeant4Wasm(); + + // Write input files to the virtual filesystem + geant4.FS.writeFile('/geometry.gdml', gdml); + geant4.FS.writeFile('/run.mac', macro); + + // Execute + geant4.run(['/run.mac']); + + // Read output files from the virtual filesystem + const outputFiles = readOutputFiles(geant4.FS); + + // Post results back to main thread + self.postMessage({ type: 'results', data: outputFiles }); +}; +``` + +### 3. Dataset Download + +Geant4 requires physics **datasets** (cross-section tables, etc.) to run simulations. These are downloaded on first use: + +1. The UI detects that Geant4 datasets are needed +2. `Geant4DatasetContextProvider` triggers the download +3. Datasets are downloaded from a CDN and cached in the browser's IndexedDB +4. Subsequent runs skip the download + +:::caution +The dataset download can be several hundred MB. Users are informed of the progress and can cancel. +::: + +### 4. Progress Reporting + +The Wasm worker reports progress back to the main thread: + +```typescript +// In the worker +geant4.onProgress = (simulated, total) => { + self.postMessage({ + type: 'progress', + data: { simulatedPrimaries: simulated, requestedPrimaries: total } + }); +}; +``` + +The `Geant4LocalWorkerSimulationService` translates these into the same status format as the remote service, so the UI renders progress bars identically. + +### 5. 
Result Parsing + +Output files from the Geant4 run are parsed by `Geant4ResultsFileParser`: + +- Reads scorer output files from the Wasm virtual filesystem +- Converts them into the standard `Estimator` → `Page` structure +- Results are passed directly to JSRoot for rendering + +## Demo Mode + +When `REACT_APP_TARGET=demo`, the UI runs in demo mode: + +- Authentication is disabled +- Backend communication is disabled +- **Only Geant4 Wasm simulations are available** +- No job persistence (results exist only in the browser session) + +This is the default mode for `npm run dev` and for the public demo deployment. + +## Supported Features + +### Geometry + +The converter generates GDML with: +- Standard solids: `box`, `tube` (cylinder), `sphere` +- Nested volume hierarchy (recursive placement) +- Material assignments from the ICRU material database + +### Scoring + +The macro generator creates: +- `/score/create/` mesh scorers +- Dose, fluence, and other quantity scoring +- Beam definition via `/gps/` commands + +### Limitations + +- **Performance**: WebAssembly runs single-threaded and is slower than native Geant4. Complex simulations with many primaries may take several minutes. +- **Memory**: Large geometries or high-resolution scoring meshes may exhaust browser memory (typically ~2–4 GB limit). +- **Physics models**: The Wasm build may not include all Geant4 physics lists. Check the specific build for available models. + +## Webpack Configuration + +The Webpack overrides in `config-overrides.js` handle Geant4 Wasm files: + +```javascript +// Ignore the .wasm file from normal bundling +config.module.rules.push({ + test: /geant4_wasm\.wasm$/, + type: 'asset/resource' +}); + +// Ignore node:worker_threads (not available in browser) +config.resolve.fallback = { + ...config.resolve.fallback, + 'worker_threads': false +}; +``` + +## Troubleshooting + +**Geant4 datasets not downloading:** +Check the browser console for network errors. The CDN URL must be accessible. Verify IndexedDB storage quota isn't exhausted. + +**Simulation crashes:** +Check the browser console for Wasm memory errors. Reduce the number of primaries or simplify the geometry. The Wasm runtime has a hard memory limit. + +**Slow performance:** +Geant4 Wasm is single-threaded. For large simulations, use the remote backend with SHIELD-HIT12A or FLUKA instead. diff --git a/src/content/docs/frontend/overview.md b/src/content/docs/frontend/overview.md new file mode 100644 index 0000000..43ab857 --- /dev/null +++ b/src/content/docs/frontend/overview.md @@ -0,0 +1,151 @@ +--- +title: Frontend Overview +description: Architecture and structure of the YAPTIDE React + Three.js frontend. +--- + +The YAPTIDE frontend is a **React** single-page application that provides a 3D simulation editor, job management, and result visualization. It runs entirely in the browser, with optional in-browser Python and Geant4 runtimes. 
+ +## Tech Stack + +| Component | Technology | +|---|---| +| Framework | React (TypeScript) | +| 3D engine | Three.js | +| UI kit | MUI + Emotion | +| HTTP client | `ky` (fetch wrapper) | +| Auth | Keycloak JS SDK (OIDC) | +| Python in browser | Pyodide + comlink (Web Worker) | +| Geant4 in browser | Geant4 WebAssembly (Web Worker) | +| Result rendering | JSRoot | +| Build system | Create React App + react-app-rewired | +| Testing | Jest + React Testing Library | + +## Architecture + +### No Redux, No Router + +The frontend uses **neither Redux/Zustand nor React Router**: + +- **State management**: React Context providers arranged in a deeply-nested `ServiceTree` (composition pattern). Each service exposes a custom hook. +- **Navigation**: A custom `TabPanel` system driven by string state. No URL-based routing. + +### Service Tree + +`App.tsx` composes the entire application: + +``` +ConfigProvider + └─ ThemeProvider + SnackbarProvider + └─ KeycloakAuth + └─ Store (YaptideEditor instance) + └─ Geant4DatasetContextProvider + └─ DialogProvider + └─ Auth + └─ RemoteWorkerSimulationContextProvider + └─ PythonConverterService + └─ Geant4LocalWorkerSimulationContextProvider + └─ Loader + └─ WrapperApp (the actual UI) +``` + +Each provider creates a React Context exposing a hook: + +| Context | Hook | Purpose | +|---|---|---| +| `ConfigProvider` | `useConfig()` | Backend URL, demo mode, deployment flags | +| `Store` | `useStore()` | `YaptideEditor` instance, tracked jobs, results | +| `Auth` | `useAuth()` | Login/logout, `authKy` (authenticated HTTP client) | +| `KeycloakAuth` | `useKeycloakAuth()` | Keycloak SSO state | +| `PythonConverterService` | `usePythonConverter()` | In-browser JSON→input file conversion | +| `Loader` | `useLoader()` | Load projects from files/URLs/JSON | + +### Tab Navigation + +`WrapperApp.tsx` renders a tab bar with these panels: + +| Tab | Component | Purpose | +|---|---|---| +| `login` | LoginPanel | Username/password or Keycloak SSO | +| `editor` | EditorPanel | 3D scene editor (main working area) | +| `examples` | ExamplesPanel | Load pre-built example simulations | +| `simulations` | SimulationsPanel | View/manage submitted jobs | +| `inputFiles` | InputFilesPanel | Inspect generated simulator input files | +| `results` | ResultsPanel | Visualize simulation results (JSRoot plots) | +| `about` | AboutPanel | Credits and version info | + +## Directory Structure + +``` +src/ +├── App.tsx # Service tree composition +├── config/ +│ └── ConfigService.tsx # Environment variable config +├── services/ +│ ├── AuthService.tsx # Auth context + hook +│ ├── StoreService.tsx # Editor + job state +│ ├── LoaderService.tsx # Project loading +│ ├── KeycloakAuthService.tsx +│ ├── RemoteWorkerSimulationContextProvider.tsx +│ └── Geant4LocalWorkerSimulationContextProvider.tsx +├── ThreeEditor/ +│ ├── js/ +│ │ ├── YaptideEditor.js # Core editor class (~780 lines) +│ │ ├── EditorContext.ts # Context switching (geometry/scoring/settings) +│ │ └── viewport/ # 4-way split viewport +│ ├── Simulation/ +│ │ ├── Figures/ # BoxGeometry, SphereGeometry, etc. 
+│ │ ├── Zones/ # BooleanZone CSG operations +│ │ ├── Detectors/ # Scoring detector types +│ │ ├── Physics/ # Beam, physics settings +│ │ └── Materials/ # Material definitions +│ └── components/ +│ └── Sidebar/ # EditorSidebar (Geometry/Scoring/Settings tabs) +├── PythonConverter/ +│ ├── PythonWorker.ts # Pyodide Web Worker +│ └── PythonConverterService.tsx # React context wrapping the worker +├── Geant4Worker/ # Geant4 Wasm Web Worker +├── JsRoot/ # JSRoot result visualization +├── WrapperApp/ # Tab navigation + panels +├── libs/ +│ └── converter/ # Git submodule → converter repo +└── util/ # Shared utilities +``` + +## Key Design Patterns + +### Signals (Event System) + +The 3D editor uses the `signals` library for internal event propagation. Over **40 signals** cover events like: +- `objectAdded`, `objectRemoved`, `objectChanged` +- `zoneGeometryChanged`, `scoringQuantityChanged` +- `editorCleared`, `sceneGraphChanged` + +React bridges these signals via `useSignal` hooks to trigger re-renders. + +### Command Pattern + +All editor mutations (add figure, move object, change material) go through a Command pattern for **undo/redo** support. See [Adding Commands](/for_developers/frontend/adding-commands/). + +### Serialization + +The editor JSON format captures the full simulation state. It is: +- Auto-saved to `localStorage` on every change +- Exportable as `.json` files +- Sent to the backend for simulation submission +- Consumed by the Pyodide converter for input file generation + +## Build System + +The project uses **Create React App** with **react-app-rewired** for Webpack customization (`config-overrides.js`): + +- `react-dnd` ESM compatibility fixes +- `.cjs`/`.mjs` module resolution +- Ignoring `geant4_wasm.wasm` from bundling +- Ignoring `node:worker_threads` + +## Related Pages + +- [3D Editor](/for_developers/frontend/3d-editor/) — editor internals and managers +- [Simulation Services](/for_developers/frontend/simulation-services/) — remote and local execution +- [Pyodide Converter](/for_developers/frontend/pyodide-converter/) — in-browser Python +- [Auth Flows](/for_developers/frontend/auth-flows/) — authentication implementation diff --git a/src/content/docs/frontend/pyodide-converter.md b/src/content/docs/frontend/pyodide-converter.md new file mode 100644 index 0000000..ce200e9 --- /dev/null +++ b/src/content/docs/frontend/pyodide-converter.md @@ -0,0 +1,152 @@ +--- +title: Pyodide Converter +description: How the Python converter runs in the browser via Pyodide WebAssembly. +--- + +The `yaptide-converter` Python package runs **in the browser** via Pyodide (Python compiled to WebAssembly). This enables real-time conversion of editor JSON to simulator input files without a server round-trip. + +## Architecture + +``` +React UI Thread Web Worker Thread +┌────────────────────┐ ┌───────────────────────────┐ +│ PythonConverterSvc │ comlink │ PythonWorker.ts │ +│ │ ◄────────► │ │ +│ convertJSON( │ │ Pyodide runtime │ +│ editorJson, │ │ └─ micropip │ +│ "shieldhit" │ │ └─ yaptide_converter│ +│ ) │ │ .whl package │ +└────────────────────┘ └───────────────────────────┘ +``` + +## Build Pipeline + +### `buildPython.js` + +Run via `npm run build-python`: + +1. Creates a Python virtual environment +2. Installs Poetry in the venv +3. Builds the converter wheel: + ```bash + poetry build -f wheel + ``` +4. Copies the `.whl` file to `public/libs/converter/dist/` + +The wheel is served as a static asset by the dev server / production build. 
+
+> Re-run `npm run build-python` whenever the converter code changes.
+
+### Converter Source
+
+The converter is included as a **Git submodule** at `src/libs/converter/` pointing to the converter repository. If the directory is empty:
+
+```bash
+git submodule update --init --recursive
+```
+
+## Runtime Initialization
+
+### PythonWorker.ts
+
+The Web Worker initializes Pyodide on first use:
+
+```typescript
+// Simplified initialization flow
+async function initPyodide() {
+  // 1. Load Pyodide from CDN
+  // Note: version numbers in this example may differ from the current codebase
+  const pyodide = await loadPyodide({
+    indexURL: 'https://cdn.jsdelivr.net/pyodide/v0.25.0/full/'
+  });
+
+  // 2. Install micropip
+  await pyodide.loadPackage('micropip');
+  const micropip = pyodide.pyimport('micropip');
+
+  // 3. Install the converter wheel from local static files
+  await micropip.install('/libs/converter/dist/yaptide_converter-X.Y.Z-py3-none-any.whl');
+
+  // 4. Import the converter API
+  pyodide.runPython(`
+    from converter.api import get_parser_from_str, run_parser
+  `);
+
+  return pyodide;
+}
+```
+
+### PythonConverterService.tsx
+
+The React context wraps the Web Worker with `comlink`:
+
+```typescript
+// Simplified service; readiness tracking is omitted here
+const PythonConverterService = () => {
+  const worker = useMemo(() => {
+    const raw = new Worker(new URL('./PythonWorker.ts', import.meta.url));
+    return Comlink.wrap(raw);
+  }, []);
+
+  const convertJSON = useCallback(
+    async (editorJson, simType) => worker.convertJSON(editorJson, simType),
+    [worker]
+  );
+
+  return { convertJSON };
+};
+```
+
+Usage in components:
+
+```typescript
+const { convertJSON } = usePythonConverter();
+
+// Convert editor state to SHIELD-HIT12A input files
+const files = await convertJSON(editor.toJSON(), 'shieldhit');
+// files = { "beam.dat": "...", "mat.dat": "...", ... }
+```
+
+## Use Cases
+
+### 1. Input File Preview
+
+The **Input Files** tab shows the generated simulator input files in real-time as the user edits the scene:
+
+```
+User edits geometry → Editor JSON auto-updates → Pyodide converts → UI shows beam.dat, geo.dat, etc.
+```
+
+### 2. Geant4 Local Simulation
+
+For in-browser Geant4 execution:
+
+```
+Editor JSON → Pyodide converter → geometry.gdml + run.mac → Geant4 Wasm Worker
+```
+
+### 3. Server-Side Fallback
+
+When submitting to the backend (SHIELD-HIT12A, FLUKA), the backend runs the same converter server-side. The Pyodide version is primarily for preview and Geant4.
+
+## Performance
+
+- **First load**: ~5–10 seconds (downloading Pyodide + installing the converter wheel)
+- **Subsequent conversions**: ~100–500 ms depending on scene complexity
+- **Memory**: Pyodide typically uses 50–100 MB in the browser
+
+The Web Worker thread prevents conversion from blocking the UI. The `comlink` library provides a clean async API.
+
+## Troubleshooting
+
+**Converter not loading:**
+Check the browser console for Pyodide loading errors. Common causes:
+
+- CDN blocked by network policy
+- Wheel file not found at `/libs/converter/dist/`
+- Run `npm run build-python` if the wheel is missing
+
+**Stale conversion results:**
+If you've updated the converter code, rebuild the wheel:
+
+```bash
+npm run build-python
+```
+
+Then hard-refresh the browser (Ctrl+Shift+R) to clear cached assets.
diff --git a/src/content/docs/frontend/simulation-services.md b/src/content/docs/frontend/simulation-services.md
new file mode 100644
index 0000000..7d83c4d
--- /dev/null
+++ b/src/content/docs/frontend/simulation-services.md
@@ -0,0 +1,191 @@
+---
+title: Simulation Services
+description: How the frontend submits and tracks simulations.
+---
+
+The UI communicates with two types of simulation backends — a **remote Flask server** (for SHIELD-HIT12A and FLUKA) and a **local Geant4 Wasm runtime** (in-browser). Both implement the same `SimulationService` interface.
+
+## SimulationService Interface
+
+Both services expose the same contract to the UI:
+
+```typescript
+interface SimulationService {
+  submitSimulation(config: SimulationConfig): Promise<string>; // returns job_id
+  getStatus(jobId: string): Promise<JobStatus>;
+  getResults(jobId: string): Promise<SimulationResults>;
+  cancelSimulation(jobId: string): Promise<void>;
+}
+```
+
+This abstraction lets the UI treat remote and local simulations identically.
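+
+For example, a submit-and-poll helper can stay engine-agnostic. A hypothetical sketch; the real providers expose these services through React hooks:
+
+```typescript
+// Sketch only: works with any SimulationService implementation.
+async function submitAndTrack(
+  service: SimulationService,
+  config: SimulationConfig
+): Promise<SimulationResults> {
+  const jobId = await service.submitSimulation(config);
+
+  // Same polling contract whether the job runs on the server or in a Web Worker
+  let status = await service.getStatus(jobId);
+  while (!['COMPLETED', 'FAILED', 'CANCELED'].includes(status.jobState)) {
+    await new Promise(resolve => setTimeout(resolve, 1000));
+    status = await service.getStatus(jobId);
+  }
+
+  if (status.jobState !== 'COMPLETED') throw new Error(`Job ${jobId}: ${status.jobState}`);
+  return service.getResults(jobId);
+}
+
+// const service = simType === 'geant4' ? localGeant4Service : remoteService;
+// const results = await submitAndTrack(service, config);
+```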
+
+## Remote Simulation Service
+
+`services/RemoteWorkerSimulationService.ts` (~745 lines) handles SHIELD-HIT12A and FLUKA simulations on the backend server.
+
+### HTTP Client
+
+Uses `ky` (a fetch wrapper) with:
+
+- **Base URL**: `backendUrl` from `ConfigProvider` (default: `http://localhost:5000`)
+- **Credentials**: `include` (sends httpOnly cookies)
+- **Response transform**: automatic `snake_case` → `camelCase` conversion
+
+### Submission Flow
+
+```
+1. User clicks "Run Simulation" in the editor
+2. UI serializes the editor state to JSON
+3. POST /jobs/direct:
+   {
+     sim_data: <editor JSON>,
+     ntasks: <number of parallel tasks>,
+     sim_type: "shieldhit",
+     input_type: "editor"
+   }
+4. Backend returns { job_id }
+5. UI starts polling
+```
+
+### Polling Loop
+
+```typescript
+// Simplified polling logic
+async function pollJobStatus(jobId: string) {
+  while (true) {
+    const status = await authKy.get(`jobs/direct?job_id=${jobId}`).json();
+
+    if (status.jobState === 'COMPLETED') {
+      const results = await authKy.get(`results?job_id=${jobId}`).json();
+      displayResults(results);
+      return;
+    }
+
+    if (status.jobState === 'FAILED' || status.jobState === 'CANCELED') {
+      showError(status);
+      return;
+    }
+
+    // Update progress bars
+    updateTaskProgress(status.jobTasksStatus);
+
+    await sleep(getPollingInterval());
+  }
+}
+```
+
+The polling interval increases over time to reduce server load on long-running simulations.
+
+### Result Caching
+
+The service caches results for completed and failed jobs. If a user revisits a completed simulation, results are served from the cache without hitting the backend.
+
+### Batch Submissions
+
+For HPC cluster simulations (PLGrid), the service uses `POST /jobs/batch` with additional options:
+
+```typescript
+{
+  sim_data: <editor JSON>,
+  ntasks: 100,
+  sim_type: "shieldhit",
+  batch_options: {
+    cluster_name: "ares",
+    array_options: "--time=01:00:00 --mem=4G",
+    collect_options: "--time=00:30:00"
+  }
+}
+```
+
+Batch submissions require Keycloak authentication (PLGrid credentials).
+
+## Local Simulation Service (Geant4 Wasm)
+
+`services/Geant4LocalWorkerSimulationService.ts` runs Geant4 simulations entirely in the browser.
+
+### Execution Flow
+
+```
+1. Editor JSON
+   │
+   ▼
+2. Pyodide Converter (Web Worker)
+   → geometry.gdml + run.mac
+   │
+   ▼
+3. Geant4 Wasm Worker
+   → Executes simulation
+   → Produces output files
+   │
+   ▼
+4. Geant4ResultsFileParser
+   → Parses output into estimators/pages
+   │
+   ▼
+5. UI renders results (JSRoot)
+```
+
+No server communication occurs. This path works in **demo mode**.
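+
+Wired together, the pipeline might look like this (names such as `convertJSON` and `parseGeant4Results` are illustrative, not the actual service code):
+
+```typescript
+// Illustrative wiring of the in-browser pipeline; actual worker messages differ.
+async function runLocalGeant4(editorJson: object): Promise<SimulationResults> {
+  // Step 2: the Pyodide converter produces the Geant4 input files
+  const files = await convertJSON(editorJson, 'geant4'); // { 'geometry.gdml': ..., 'run.mac': ... }
+
+  // Step 3: hand the input files to the Geant4 Wasm worker
+  const worker = new Worker(new URL('./Geant4Worker.ts', import.meta.url));
+  worker.postMessage({ type: 'run', files });
+
+  // Wait for the worker to report completion with its output files
+  const output = await new Promise<Record<string, string>>(resolve => {
+    worker.onmessage = event => {
+      if (event.data.type === 'done') resolve(event.data.files);
+    };
+  });
+
+  // Steps 4–5: parse the output into estimators/pages for JSRoot rendering
+  return parseGeant4Results(output);
+}
+```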
+
+### Progress Tracking
+
+The Geant4 Wasm worker reports progress via `postMessage`:
+
+```typescript
+// In the Wasm worker
+self.postMessage({
+  type: 'progress',
+  data: {
+    simulatedPrimaries: 5000,
+    requestedPrimaries: 10000
+  }
+});
+```
+
+The service updates the UI's progress bars using the same interface as the remote service.
+
+## Result Format
+
+Both services produce results in the same format:
+
+```typescript
+interface SimulationResults {
+  estimators: Estimator[];
+}
+
+interface Estimator {
+  name: string;
+  pages: Page[];
+}
+
+interface Page {
+  pageName: string;
+  pageDimension: number;
+  data: {
+    axes: Axis[];
+    values: number[];
+    errors?: number[];
+  };
+}
+```
+
+This is rendered by the JSRoot integration in `JsRoot/`:
+
+- **1D data** (e.g., depth-dose): line plots / histograms
+- **2D data** (e.g., spatial dose maps): color map / contour plots
+- Interactive zoom, pan, cursor readout
+- CSV export
+
+## Simulation Panel
+
+`WrapperApp/SimulationsPanel.tsx` shows a paginated table of all the current user's simulations:
+
+| Column | Source |
+|---|---|
+| Title | Simulation title from editor |
+| Status | Job state (color-coded badge) |
+| Engine | `shieldhit`, `fluka`, `geant4` |
+| Platform | `direct` or `batch` |
+| Started | Submission timestamp |
+| Actions | View results, re-run, delete |
+
+The panel fetches data from `GET /user/simulations` with pagination.
diff --git a/src/content/docs/frontend/testing.md b/src/content/docs/frontend/testing.md
new file mode 100644
index 0000000..2fdc979
--- /dev/null
+++ b/src/content/docs/frontend/testing.md
@@ -0,0 +1,189 @@
+---
+title: Frontend Testing
+description: Testing strategy and patterns for the YAPTIDE frontend.
+---
+
+The frontend uses **Jest** with **React Testing Library** for unit and component tests.
+
+## Running Tests
+
+```bash
+# Run all tests
+npm test
+
+# Run with coverage
+npm test -- --coverage
+
+# Run a specific test file
+npm test -- --testPathPattern=SetPosition
+
+# Run in watch mode (default with npm test)
+# Press 'a' to run all, 'f' to run failed, 'q' to quit
+```
+
+## Test Structure
+
+```
+src/
+├── __tests__/
+│   ├── commands/    # Editor command tests
+│   ├── components/  # React component tests
+│   └── services/    # Service logic tests
+├── ThreeEditor/
+│   └── __tests__/   # Editor-specific tests (co-located)
+└── setupTests.ts    # Global test setup
+```
+
+## Test Setup
+
+`setupTests.ts` configures the test environment:
+
+```typescript
+import '@testing-library/jest-dom';
+
+// Mock Web Workers (not available in jsdom)
+// Mock Pyodide, Geant4 Wasm, etc.
+```
+
+## Testing Editor Commands
+
+Commands are the most critical testable unit. Every command should be tested for:
+
+1. **Execute** — correct state after applying
+2. **Undo** — state reverts to original
+3. **Redo** — state re-applies correctly
+4. **Serialization** — `toJSON()` and `fromJSON()` round-trip
+
+```typescript
+describe('AddFigureCommand', () => {
+  let editor: YaptideEditor;
+
+  beforeEach(() => {
+    editor = new YaptideEditor();
+  });
+
+  test('adds figure to scene', () => {
+    const figure = createBoxFigure({ x: 10, y: 10, z: 10 });
+    editor.execute(new AddFigureCommand(editor, figure));
+
+    expect(editor.figureManager.figures).toHaveLength(1);
+    expect(editor.figureManager.figures[0].name).toBe(figure.name);
+  });
+
+  test('undo removes the figure', () => {
+    const figure = createBoxFigure({ x: 10, y: 10, z: 10 });
+    editor.execute(new AddFigureCommand(editor, figure));
+    editor.undo();
+
+    expect(editor.figureManager.figures).toHaveLength(0);
+  });
+});
+```
+
+## Testing React Components
+
+Use React Testing Library for component tests:
+
+```typescript
+import { render, screen, fireEvent } from '@testing-library/react';
+
+describe('LoginPanel', () => {
+  test('shows error on invalid credentials', async () => {
+    render(<LoginPanel />);
+
+    fireEvent.change(screen.getByLabelText('Username'), {
+      target: { value: 'wrong' }
+    });
+    fireEvent.change(screen.getByLabelText('Password'), {
+      target: { value: 'wrong' }
+    });
+    fireEvent.click(screen.getByText('Log in'));
+
+    expect(await screen.findByText(/invalid/i)).toBeInTheDocument();
+  });
+});
+```
+
+## Mocking
+
+### Web Workers
+
+Web Workers aren't available in the jsdom test environment. Mock them:
+
+```typescript
+// __mocks__/worker.ts
+class MockWorker {
+  onmessage: ((e: MessageEvent) => void) | null = null;
+
+  postMessage(data: any) {
+    // Simulate a worker response
+    if (this.onmessage) {
+      this.onmessage(new MessageEvent('message', { data: { result: {} } }));
+    }
+  }
+
+  terminate() {}
+}
+
+global.Worker = MockWorker as any;
+```
+
+### Backend API
+
+Mock `ky` for API tests:
+
+```typescript
+jest.mock('ky', () => ({
+  create: () => ({
+    get: jest.fn().mockResolvedValue({
+      json: () => Promise.resolve({ jobState: 'COMPLETED' })
+    }),
+    post: jest.fn().mockResolvedValue({
+      json: () => Promise.resolve({ jobId: 'test-123' })
+    }),
+  })
+}));
+```
+
+### Three.js
+
+For tests that don't need rendering, mock Three.js objects:
+
+```typescript
+jest.mock('three', () => ({
+  BoxGeometry: jest.fn(),
+  Mesh: jest.fn(),
+  Scene: jest.fn(() => ({
+    add: jest.fn(),
+    remove: jest.fn(),
+    children: []
+  })),
+}));
+```
+
+## Code Quality
+
+### ESLint
+
+```bash
+npm run lint
+```
+
+Uses Create React App's ESLint preset with additional rules:
+
+- `simple-import-sort` — enforced import ordering
+- Prettier integration for formatting
+
+### Prettier
+
+```bash
+npx prettier --check src/
+npx prettier --write src/
+```
+
+## Best Practices
+
+- **Test behavior, not implementation** — use `screen.getByText()` over component internals
+- **Test commands thoroughly** — they are the backbone of editor state management
+- **Mock at boundaries** — mock Web Workers, APIs, and Three.js rendering, not React components
+- **Keep fixtures simple** — use factory functions for creating test objects (see the sketch below)
+- **No snapshot tests** — they break easily and provide little value for this codebase
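+
+As an example of such a factory, a `createBoxFigure` helper like the one used in the command tests above might look like this (the import path and figure class are assumptions):
+
+```typescript
+import * as THREE from 'three';
+// Hypothetical import path; the real figure classes live under ThreeEditor/Simulation/Figures
+import { BoxFigure } from '../ThreeEditor/Simulation/Figures';
+
+let figureCounter = 0;
+
+// Keeps tests declarative: one call per fixture, unique names by default.
+export function createBoxFigure(
+  size: { x: number; y: number; z: number },
+  name?: string
+): BoxFigure {
+  const figure = new BoxFigure();
+  figure.name = name ?? `Box_${++figureCounter}`;
+  figure.geometry = new THREE.BoxGeometry(size.x, size.y, size.z);
+  return figure;
+}
+```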
diff --git a/src/content/docs/index.mdx b/src/content/docs/index.mdx
new file mode 100644
index 0000000..3205b11
--- /dev/null
+++ b/src/content/docs/index.mdx
@@ -0,0 +1,43 @@
+---
+title: YAPTIDE Developer Docs
+description: A web-based IDE for Monte Carlo particle transport simulations.
+---
+
+import { Card, CardGrid, LinkCard } from "@astrojs/starlight/components";
+
+Welcome to the YAPTIDE developer documentation! Here you can find guides and resources for setting up a development environment, understanding the architecture, and contributing to the project.
+The source code is hosted on [GitHub](https://github.com/yaptide/).
+
+To learn how to use the app, see the [user documentation](https://yaptide.github.io/docs/).
+
+## How YAPTIDE Works
+
+YAPTIDE has three execution modes. You don't need the full stack for all of them.
+
+| Mode                | What you can do                                            | What you need                          |
+| ------------------- | ---------------------------------------------------------- | -------------------------------------- |
+| **Demo**            | Edit geometry, generate input files, run Geant4 in-browser | Just a browser                         |
+| **Celery (direct)** | Submit SHIELD-HIT12A / FLUKA jobs to local workers         | Docker stack or local backend + Redis  |
+| **Batch (SLURM)**   | Submit to HPC clusters (e.g. PLGrid Ares)                  | Full stack + Keycloak + cluster access |
+
+## Setup
+
+<CardGrid>
+  <Card title="Demo">
+    Quick start with no backend needed. Set up via [Local](/for_developers/local-setup/local-frontend-demo/) or [Docker](/for_developers/docker-setup/docker-frontend-demo/).
+  </Card>
+  <Card title="Celery (direct)">
+    Backend services and Celery workers included. Set up via [Local](/for_developers/local-setup/local-celery/) or [Docker](/for_developers/docker-setup/docker-celery/).
+  </Card>
+  <Card title="Batch (SLURM)">
+    Full stack with SLURM cluster integration. Set up via [Local](/for_developers/local-setup/local-slurm/) or [Docker](/for_developers/docker-setup/docker-slurm/).
+  </Card>
+</CardGrid>
+
+## Learn about the architecture
+
+<CardGrid>
+  <LinkCard title="3D Editor" href="/for_developers/frontend/3d-editor/" />
+  <LinkCard title="Simulation Services" href="/for_developers/frontend/simulation-services/" />
+  <LinkCard title="Pyodide Converter" href="/for_developers/frontend/pyodide-converter/" />
+  <LinkCard title="Auth Flows" href="/for_developers/frontend/auth-flows/" />
+</CardGrid>
diff --git a/src/content/docs/local-setup/local-celery.mdx b/src/content/docs/local-setup/local-celery.mdx
new file mode 100644
index 0000000..5944efd
--- /dev/null
+++ b/src/content/docs/local-setup/local-celery.mdx
@@ -0,0 +1,303 @@
+---
+title: Local Setup — Celery Workers
+description: Run the YAPTIDE backend locally with Celery workers for SHIELD-HIT12A and FLUKA simulations.
+---
+
+import { Tabs, TabItem } from "@astrojs/starlight/components";
+
+This guide covers a full local setup — Flask API, Redis, Celery simulation worker, Celery helper worker, and the frontend.
+Use this when you need to run SHIELD-HIT12A or FLUKA simulations locally with fast iteration on backend code.
+
+## When to use this setup
+
+| Scenario                               | Recommended setup                                                       |
+| -------------------------------------- | ----------------------------------------------------------------------- |
+| Frontend / Geant4 in-browser only      | [Local Frontend Demo](/for_developers/local-setup/local-frontend-demo/) |
+| Full stack with SHIELD-HIT12A or FLUKA | **This page**                                                            |
+| Full stack with SLURM cluster          | [Local SLURM](/for_developers/local-setup/local-slurm/)                 |
+
+:::tip
+Prefer Docker? See [Celery Workers — Docker](/for_developers/docker-setup/docker-celery/) for the containerized version of this setup.
+:::
+
+## What you'll run
+
+You need **three terminals** for the backend, plus one for the frontend:
+
+| Terminal | What runs                |
+| -------- | ------------------------ |
+| 1        | Celery simulation worker |
+| 2        | Celery helper worker     |
+| 3        | Flask API                |
+| 4        | Frontend dev server      |
+
+## Prerequisites
+
+- **Python 3.9+** with [Poetry](https://python-poetry.org/docs/) installed
+- **Node.js 20+** with npm
+- **Docker** (for Redis)
+- **Git**
+
+## Backend setup
+
+### 1. Clone and install dependencies
+
+```bash
+git clone https://github.com/yaptide/yaptide.git
+cd yaptide
+poetry install
+```
+
+This installs all dependencies (main + test + docs) into a `.venv` directory in the project root.
+
+### 2. Download simulator binaries
+
+YAPTIDE stores simulator binaries on S3 storage. The demo version of SHIELD-HIT12A is freely available:
+
+<Tabs>
+  <TabItem label="Linux">
+    ```bash
+    poetry run python yaptide/admin/simulators.py download-shieldhit --dir bin
+    ```
+  </TabItem>
+  <TabItem label="Windows">
+    ```powershell
+    poetry run python yaptide\admin\simulators.py download-shieldhit --dir bin
+    ```
+  </TabItem>
+</Tabs>
+
+:::note
+The full (licensed) versions of SHIELD-HIT12A and FLUKA are encrypted. The demo download gives you a freely usable binary for development.
+:::
+
+### 3. Start Redis
+
+Redis is the message broker between Flask and the Celery workers.
+The simplest approach is a Docker container:
+
+```bash
+docker run --detach --publish 6379:6379 --name yaptide_redis_local redis:7-alpine
+```
+
+### 4. Start the Celery simulation worker
+
+This worker picks up simulation jobs from the queue and runs the simulator. It needs access to the simulator binaries (the `bin` directory from step 2).
+
+<Tabs>
+  <TabItem label="Linux">
+    ```bash
+    PATH=$PATH:bin \
+    BACKEND_INTERNAL_URL=http://127.0.0.1:5000 \
+    CELERY_BROKER_URL=redis://127.0.0.1:6379/0 \
+    CELERY_RESULT_BACKEND=redis://127.0.0.1:6379/0 \
+    poetry run celery \
+      --app yaptide.celery.simulation_worker worker \
+      --events -P eventlet \
+      --hostname yaptide-simulation-worker \
+      --queues simulations \
+      --loglevel=debug
+    ```
+  </TabItem>
+  <TabItem label="Windows">
+    ```powershell
+    $Env:PATH += ";" + (Join-Path -Path (Get-Location) -ChildPath "bin")
+    $env:BACKEND_INTERNAL_URL="http://127.0.0.1:5000"
+    $env:CELERY_BROKER_URL="redis://127.0.0.1:6379/0"
+    $env:CELERY_RESULT_BACKEND="redis://127.0.0.1:6379/0"
+    poetry run celery --app yaptide.celery.simulation_worker worker --events -P eventlet --hostname yaptide-simulation-worker --queues simulations --loglevel=debug
+    ```
+  </TabItem>
+</Tabs>
+
+**Why these variables?**
+
+- `PATH` — so the worker can find the SHIELD-HIT12A binary in `bin/`
+- `BACKEND_INTERNAL_URL` — the worker reports progress back to Flask at this address
+- `CELERY_BROKER_URL` / `CELERY_RESULT_BACKEND` — connect to Redis for task dispatch and result storage
+
+### 5. Start the Celery helper worker
+
+The helper worker handles post-processing tasks (collecting results, cleanup). Open a second backend terminal and go to the `yaptide/` directory. Run:
+
+<Tabs>
+  <TabItem label="Linux">
+    ```bash
+    FLASK_SQLALCHEMY_DATABASE_URI=sqlite:///db.sqlite \
+    BACKEND_INTERNAL_URL=http://127.0.0.1:5000 \
+    CELERY_BROKER_URL=redis://127.0.0.1:6379/0 \
+    CELERY_RESULT_BACKEND=redis://127.0.0.1:6379/0 \
+    poetry run celery \
+      --app yaptide.utils.helper_worker worker \
+      --events \
+      --hostname yaptide-helper-worker \
+      --queues helper \
+      --loglevel=debug
+    ```
+  </TabItem>
+  <TabItem label="Windows">
+    ```powershell
+    $env:FLASK_SQLALCHEMY_DATABASE_URI="sqlite:///db.sqlite"
+    $env:BACKEND_INTERNAL_URL="http://127.0.0.1:5000"
+    $env:CELERY_BROKER_URL="redis://127.0.0.1:6379/0"
+    $env:CELERY_RESULT_BACKEND="redis://127.0.0.1:6379/0"
+    poetry run celery --app yaptide.utils.helper_worker worker --events --hostname yaptide-helper-worker --queues helper --loglevel=debug
+    ```
+  </TabItem>
+</Tabs>
+
+### 6. Start the Flask API
+
+Open a third backend terminal and go to the `yaptide/` directory. Run:
+
+<Tabs>
+  <TabItem label="Linux">
+    ```bash
+    FLASK_SQLALCHEMY_ECHO=True \
+    FLASK_USE_CORS=True \
+    FLASK_SQLALCHEMY_DATABASE_URI="sqlite:///db.sqlite" \
+    CELERY_BROKER_URL=redis://127.0.0.1:6379/0 \
+    CELERY_RESULT_BACKEND=redis://127.0.0.1:6379/0 \
+    poetry run flask --debug --app yaptide.application run
+    ```
+  </TabItem>
+  <TabItem label="Windows">
+    ```powershell
+    $env:FLASK_SQLALCHEMY_ECHO="True"
+    $env:FLASK_USE_CORS="True"
+    $env:FLASK_SQLALCHEMY_DATABASE_URI="sqlite:///db.sqlite"
+    $env:CELERY_BROKER_URL="redis://127.0.0.1:6379/0"
+    $env:CELERY_RESULT_BACKEND="redis://127.0.0.1:6379/0"
+    poetry run flask --debug --app yaptide.application run
+    ```
+  </TabItem>
+</Tabs>
+
+**Why `FLASK_USE_CORS=True`?** During development, the frontend runs on `localhost:3000` and the backend on `localhost:5000`. Without CORS enabled, the browser blocks cross-origin requests.
+
+This creates `db.sqlite` inside `./instance/` (the default [Flask instance folder](https://flask.palletsprojects.com/en/3.0.x/config/#instance-folders)).
+
+The `--debug` flag enables Flask debug messages and auto-reload on code changes.
+
+`FLASK_SQLALCHEMY_ECHO=True` enables SQL query logging for debugging database interactions.
+
+### 7. Create a user
+
+Before logging in from the frontend, you need to create a user in the database. Open the fourth terminal and go to the `yaptide/` directory. Run:
+
+<Tabs>
+  <TabItem label="Linux">
+    ```bash
+    FLASK_SQLALCHEMY_DATABASE_URI="sqlite:///instance/db.sqlite" \
+    poetry run python yaptide/admin/db_manage.py add-user admin --password password
+    ```
+  </TabItem>
+  <TabItem label="Windows">
+    ```powershell
+    $env:FLASK_SQLALCHEMY_DATABASE_URI="sqlite:///instance/db.sqlite"
+    poetry run python yaptide\admin\db_manage.py add-user admin --password password
+    ```
+  </TabItem>
+</Tabs>
+
+## Frontend setup
+
+Exit the `yaptide/` directory in the fourth terminal and run the following commands from the root directory:
+
+```bash
+git clone https://github.com/yaptide/ui.git
+cd ui
+git submodule update --init --recursive
+npm install
+```
+
+Create a `.env` file in the `ui/` directory:
+
+```bash title="ui/.env"
+REACT_APP_BACKEND_URL=http://localhost:5000
+```
+
+Then start the dev server:
+
+```bash
+npm run start
+```
+
+Open **http://localhost:3000**. Log in with the credentials you created (username: `admin`, password: `password`). The setup is now complete. The page reloads on edits.
+
+:::caution
+Access both the frontend and the backend using the **same domain** — either both `localhost` or both `127.0.0.1`. Mixing them breaks cookie-based authentication (`SameSite=Lax` policy).
+:::
+
+## Running tests
+
+For backend tests, run the following command from the `yaptide/` directory:
+
+<Tabs>
+  <TabItem label="Linux">
+    ```bash
+    poetry run pytest
+    ```
+  </TabItem>
+  <TabItem label="Windows">
+    On Windows, run tests one by one:
+
+    ```powershell
+    Get-ChildItem -Path "tests" -Filter "test_*.py" -Recurse | foreach { poetry run pytest $_.FullName }
+    ```
+  </TabItem>
+</Tabs>
+
+For frontend tests, run the following command from the `ui/` directory:
+
+```bash
+npm run test
+```
+
+## Code formatting
+
+The backend uses `yapf` for code formatting via pre-commit hooks:
+
+```bash
+poetry run pre-commit install
+```
+
+After installing, hooks run automatically on every `git commit`. If a hook fails:
+
+1. The commit is aborted
+2. Some hooks auto-fix files (like `yapf`) — just commit again
+3. Other issues are reported in the terminal for manual fixing
+
+To run all hooks manually:
+
+```bash
+pre-commit run --all-files
+```
+
+The frontend uses `prettier` for formatting; run it with:
+
+```bash
+npm run format
+```
diff --git a/src/content/docs/local-setup/local-frontend-demo.mdx b/src/content/docs/local-setup/local-frontend-demo.mdx
new file mode 100644
index 0000000..460ed0a
--- /dev/null
+++ b/src/content/docs/local-setup/local-frontend-demo.mdx
@@ -0,0 +1,64 @@
+---
+title: Frontend Demo — Local
+description: Run YAPTIDE entirely in the browser using demo mode — no backend required.
+---
+
+In demo mode, YAPTIDE runs entirely in the browser.
+Geant4 is compiled to WebAssembly and executes locally — no backend, no login, no internet connection needed after the initial load.
+
+## When to use this setup
+
+| Scenario                               | Recommended setup                                         |
+| -------------------------------------- | ---------------------------------------------------------- |
+| Frontend / Geant4 in-browser only      | **This page**                                               |
+| Full stack with SHIELD-HIT12A or FLUKA | [Local Celery](/for_developers/local-setup/local-celery/)  |
+| Full stack with SLURM cluster          | [Local SLURM](/for_developers/local-setup/local-slurm/)    |
+
+:::tip
+Prefer Docker? See [Frontend Demo — Docker](/for_developers/docker-setup/docker-frontend-demo/) for the containerized version of this setup.
+:::
+
+## Prerequisites
+
+- **Node.js 20+** with npm
+- **Python 3.9+** with pip and venv — needed to build the converter wheel that Pyodide loads in the browser
+- **Git**
+
+## Setup
+
+```bash
+git clone https://github.com/yaptide/ui.git
+cd ui
+git submodule update --init --recursive
+npm install
+```
+
+`git submodule update` pulls the [converter](https://github.com/yaptide/converter) submodule.
+The build step packages it as a Python wheel that runs in the browser through Pyodide (Python compiled to WebAssembly), which is why Python is required even for frontend-only work.
+
+## Start demo mode
+
+```bash
+npm run start-demo
+```
+
+Open **http://localhost:3000/web_dev**.
+
+You get the full editor experience — geometry, beam, scoring, input file generation — plus Geant4 WebAssembly for in-browser simulation. No login required.
+
+## Production build
+
+```bash
+npm run build-demo
+```
+
+Output goes to `build/`.
+
+## Useful commands
+
+| Command              | Description                     |
+| -------------------- | ------------------------------- |
+| `npm run start-demo` | Demo mode (no backend)          |
+| `npm run build-demo` | Production build with demo mode |
+| `npm run format`     | Run code formatter              |
+| `npm run test`       | Run tests in watch mode         |
diff --git a/src/content/docs/local-setup/local-slurm.mdx b/src/content/docs/local-setup/local-slurm.mdx
new file mode 100644
index 0000000..8775bdf
--- /dev/null
+++ b/src/content/docs/local-setup/local-slurm.mdx
@@ -0,0 +1,25 @@
+---
+title: Local Setup — SLURM
+description: Run the YAPTIDE backend locally with Keycloak authentication for SLURM integration development.
+---
+
+import { Tabs, TabItem } from "@astrojs/starlight/components";
+
+This guide covers a full local setup with Keycloak authentication — required for SLURM job submission.
+It builds on the [Celery worker setup](/for_developers/local-setup/local-celery/) and adds Keycloak for both the backend and frontend.
+
+## When to use this setup
+
+| Scenario                               | Recommended setup                                                       |
+| -------------------------------------- | ----------------------------------------------------------------------- |
+| Frontend / Geant4 in-browser only      | [Local Frontend Demo](/for_developers/local-setup/local-frontend-demo/) |
+| Full stack with SHIELD-HIT12A or FLUKA | [Local Celery](/for_developers/local-setup/local-celery/)               |
+| Full stack with SLURM cluster          | **This page**                                                            |
+
+:::tip
+Prefer Docker? See [Docker — SLURM](/for_developers/docker-setup/docker-slurm/) for the containerized version of this setup.
+:::
+
+:::caution[Work in Progress]
+This page is not yet complete — we are still working on the SLURM integration and will update this guide once it's ready. In the meantime, see the [Local — Celery](/for_developers/local-setup/local-celery/) guide for the Celery setup.
+:::
diff --git a/src/styles/custom.css b/src/styles/custom.css
new file mode 100644
index 0000000..3e2c11c
--- /dev/null
+++ b/src/styles/custom.css
@@ -0,0 +1,59 @@
+/* YAPTIDE Documentation — Custom Styles */
+
+:root {
+  --sl-color-accent-low: #1a1f36;
+  --sl-color-accent: #4f6df5;
+  --sl-color-accent-high: #b4c1ff;
+  --sl-font: 'Inter', -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif;
+  --sl-content-width: 55rem;
+}
+
+:root[data-theme='light'] {
+  --sl-color-accent-low: #dce3ff;
+  --sl-color-accent: #3451d1;
+  --sl-color-accent-high: #182463;
+}
+
+/* Card grid styling for landing page */
+.card-grid {
+  display: grid;
+  grid-template-columns: repeat(auto-fit, minmax(250px, 1fr));
+  gap: 1rem;
+  margin: 1.5rem 0;
+}
+
+.card-grid > .card {
+  border: 1px solid var(--sl-color-gray-5);
+  border-radius: 0.5rem;
+  padding: 1.25rem;
+  transition: border-color 0.2s;
+}
+
+.card-grid > .card:hover {
+  border-color: var(--sl-color-accent);
+}
+
+.card-grid > .card h3 {
+  margin-top: 0;
+}
+
+/* Tighter spacing for blockquote callouts */
+blockquote {
+  border-left: 3px solid var(--sl-color-accent);
+  padding: 0.5rem 1rem;
+  margin: 1rem 0;
+}
+
+/* Badge-like tech stack labels */
+.tech-badge {
+  display: inline-block;
+  background: var(--sl-color-gray-6);
+  color: var(--sl-color-white);
+  padding: 0.15rem 0.5rem;
+  border-radius: 0.25rem;
+  font-size: 0.8rem;
+  margin-right: 0.35rem;
+  margin-bottom: 0.35rem;
+}
diff --git a/tsconfig.json b/tsconfig.json
new file mode 100644
index 0000000..8bf91d3
--- /dev/null
+++ b/tsconfig.json
@@ -0,0 +1,5 @@
+{
+  "extends": "astro/tsconfigs/strict",
+  "include": [".astro/types.d.ts", "**/*"],
+  "exclude": ["dist"]
+}