diff --git a/.gitignore b/.gitignore
index f5e96dbfaec8bd23554e839a582259cf17837f26..ce63e47be313cf2f6d5eb22651985ec2dc6d4244 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1 +1,2 @@
-venv
\ No newline at end of file
+venv
+dist
\ No newline at end of file
diff --git a/README.md b/README.md
index 571cc7b76c6a02f9286a86b1a95511771ee576ec..0aa6d45560505edd507363182660f31cf85daf94 100644
--- a/README.md
+++ b/README.md
@@ -1 +1,41 @@
-# Talp pages
\ No newline at end of file
+![TALP LOGO](https://pm.bsc.es/gitlab/dlb/talp-pages/-/raw/add-v1/talp_dlb_logo.png){height=250px}
+
+# Talp Pages
+
+TALP Pages is a collection of Python scripts to postprocess the `json` outputs of [DLB TALP](https://pm.bsc.es/ftp/dlb/doc/user-guide/intro.html#talp-tracking-application-live-performance) and GitLab pipeline snippets [that can be included](https://docs.gitlab.com/ee/ci/yaml/#include) in your project.
+This makes it easy to integrate TALP into your CI/CD setup and run Continuous Benchmarking without having to code up your own solution.
+
+**We provide:**
+
+- talp_pages: a command-line tool to generate static HTML pages
+- Artifact management: an easy way to use GitLab artifacts to generate time-series plots
+- Reusable jobs that integrate easily into an existing GitLab CI environment
+
+## Use the Python package
+
+TALP Pages is written in Python (3.9+). We rely on [poetry](https://python-poetry.org/) for packaging.
+To use it, simply install via:
+
+```pip install talp-pages```
+
+From there you should have the following command-line tools available:
+
+- `talp_report`
+- `talp_add_to_db`
+- `talp_badge`
+- `talp_report_ts`
+- `talp_pages`
+- `talp_download_artifacts`
+
+## Use GitLab Jobs
+
+To use the GitLab jobs that generate the Talp Pages automagically, just adopt the configuration showcased in the [example application](https://gitlab.com/valentin.seitz1/sample-application).
+
+We also provide documentation on the individual jobs:
+
+- [add-to-db](gitlab-templates/add-to-db/README.md)
+- [generate-html](gitlab-templates/generate-html/README.md)
+
+## License
+
+TALP Pages is available under the GNU General Public License v3.0.
diff --git a/gitlab-templates/add-to-db/README.md b/gitlab-templates/add-to-db/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..2cd62be23e1d88a50752a95d472d62155399fe72
--- /dev/null
+++ b/gitlab-templates/add-to-db/README.md
@@ -0,0 +1,22 @@
+# Add to DB
+
+This job "template" adds the `.json` file of the most recent execution to the TALP database and exposes the artifacts under the `./talp` path.
+
+## Example Usage
+
+Note that you need to set up a [Personal Access Token](https://docs.gitlab.com/ee/user/profile/personal_access_tokens.html) in the `environment` the job will be executed in. This token needs to be [added as a variable](https://docs.gitlab.com/ee/ci/variables/#define-a-cicd-variable-in-the-ui) called `PAT_TOKEN` and must be able to read the GitLab artifacts API.
+
+**Make sure to hide the variable's value and configure its visibility properly.**
+
+```yaml
+include:
+  - remote: https://pm.bsc.es/gitlab/dlb/talp-pages/-/raw//gitlab-templates/add-to-db/template.yml
+    inputs:
+      stage: deploy # Stage in which the job will run
+      generating_job: talp-performance-run # Job that generated the .json file (we will grab artifacts from there)
+      talp_output: ./talp.json # The output name of the json file
+      enviroment: production # The environment the pipeline runs in
+      gitlab_url: https://gitlab.com # The URL of the GitLab instance it runs on (needed for the API request)
+      project_name: / # The project slug of the repository the jobs should run in
+      job_name: talp-create-artifacts # The name of the job (needed for the generate-html job)
+```
diff --git a/gitlab-templates/add-to-db/template.yml b/gitlab-templates/add-to-db/template.yml
new file mode 100644
index 0000000000000000000000000000000000000000..a1129f120dffab6d6ac1dcd3645563ee2727e899
--- /dev/null
+++ b/gitlab-templates/add-to-db/template.yml
@@ -0,0 +1,39 @@
+spec:
+  inputs:
+    stage:
+      default: performance
+    generating_job: # Job from which we will get the talp.json
+      default: talp-run
+    talp_output: # Filename of the json generated by TALP
+      default: ./talp.json
+    enviroment:
+      default: production
+    gitlab_url:
+      default: https://gitlab.com
+    project_name:
+      default: None
+    job_name:
+      default: talp-add-to-db-job
+---
+$[[ inputs.job_name ]]:
+  needs:
+    - job: $[[ inputs.generating_job ]]
+      artifacts: true
+  dependencies:
+    - $[[ inputs.generating_job ]]
+  environment: $[[ inputs.enviroment ]]
+  image: python:3.12-bullseye
+  before_script:
+    - pip install talp-pages
+  script:
+    - echo "Downloading last execution"
+    - talp_download_artifacts --gitlab-url $[[ inputs.gitlab_url ]] --project-name $[[ inputs.project_name ]] --job-name $CI_JOB_NAME --gitlab-token $PAT_TOKEN --output-file talp.zip --log-level=DEBUG
+    - unzip talp.zip || if ! [[ -d talp ]]; then mkdir talp; echo "Creating an empty talp directory --> First RUN?"; fi
+    - talp_add_to_db -i $[[ inputs.talp_output ]] -db talp/TALP.db
+    - cp talp.json talp/latest_talp_run.json
+  stage: $[[ inputs.stage ]]
+  artifacts:
+    paths:
+      - talp/
+
+
diff --git a/gitlab-templates/generate-html/README.md b/gitlab-templates/generate-html/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..fd2a60175cd2368af839fa1569bf8d8595c569bd
--- /dev/null
+++ b/gitlab-templates/generate-html/README.md
@@ -0,0 +1,21 @@
+# Generate HTML
+
+This job "template" generates the HTML from a given database and JSON file using `talp_pages`.
+
+## Example Usage
+
+```yaml
+- remote: https://pm.bsc.es/gitlab/dlb/talp-pages/-/raw//gitlab-templates/generate-html/template.yml
+  inputs:
+    stage: deploy # Stage in which the job will run
+    generating_job: talp-create-artifacts # The name of the add-to-db job (or any job that produces the matching artifacts)
+    enviroment: production # The environment the pipeline runs in
+    job_name: talp-gen-html # The name of the job (normally needed in the pages job, as shown below)
+
+pages:
+  ...
+  needs:
+    - job: talp-gen-html
+      optional: true
+  ...
+```
diff --git a/gitlab-templates/generate-html/template.yml b/gitlab-templates/generate-html/template.yml
new file mode 100644
index 0000000000000000000000000000000000000000..fb4836b32a8c5185766e5176c5adc31e467f6bb9
--- /dev/null
+++ b/gitlab-templates/generate-html/template.yml
@@ -0,0 +1,32 @@
+spec:
+  inputs:
+    stage:
+      default: deploy
+    generating_job: # Job from which we will get the talp.db and latest_talp_run.json
+      default: talp-run
+    enviroment:
+      default: production
+    job_name:
+      default: talp-generate-html
+---
+$[[ inputs.job_name ]]:
+  stage: $[[ inputs.stage ]]
+  needs:
+    - job: $[[ inputs.generating_job ]]
+      artifacts: true
+  dependencies:
+    - $[[ inputs.generating_job ]]
+  environment: $[[ inputs.enviroment ]]
+  image: python:3.12-bullseye
+  before_script:
+    - pip install talp-pages
+  script:
+    - mkdir -p public
+    - cd public
+    - talp_pages -j ../talp/latest_talp_run.json -d ../talp/TALP.db
+    - echo "Generated TALP pages"
+  artifacts:
+    paths:
+      - public
+
+
diff --git a/poetry.lock b/poetry.lock
new file mode 100644
index 0000000000000000000000000000000000000000..a2c8359ab97d3fa354342e0aa921343b009f105f
--- /dev/null
+++ b/poetry.lock
@@ -0,0 +1,449 @@
+# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand.
+
+[[package]]
+name = "certifi"
+version = "2024.2.2"
+description = "Python package for providing Mozilla's CA Bundle."
+optional = false
+python-versions = ">=3.6"
+files = [
+    {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"},
+    {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"},
+]
+
+[[package]]
+name = "charset-normalizer"
+version = "3.3.2"
+description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
+optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "idna" +version = "3.7" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, +] + +[[package]] +name = "jinja2" +version = "3.1.4" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, + {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = 
"MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = 
"MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "numpy" +version = "1.26.4" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, + {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = 
"sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, + {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, + {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, + {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, + {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, + {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, + {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = 
"sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, + {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, + {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, +] + +[[package]] +name = "pandas" +version = "2.2.2" +description = "Powerful data structures for data analysis, time series, and statistics" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"}, + {file = "pandas-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7adfc142dac335d8c1e0dcbd37eb8617eac386596eb9e1a1b77791cf2498238"}, + {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"}, + {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"}, + {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"}, + {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e5a0b00e1e56a842f922e7fae8ae4077aee4af0acb5ae3622bd4b4c30aedf99"}, + {file = "pandas-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:ddf818e4e6c7c6f4f7c8a12709696d193976b591cc7dc50588d3d1a6b5dc8772"}, + {file = "pandas-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:696039430f7a562b74fa45f540aca068ea85fa34c244d0deee539cb6d70aa288"}, + {file = "pandas-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e90497254aacacbc4ea6ae5e7a8cd75629d6ad2b30025a4a8b09aa4faf55151"}, + {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58b84b91b0b9f4bafac2a0ac55002280c094dfc6402402332c0913a59654ab2b"}, + {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2123dc9ad6a814bcdea0f099885276b31b24f7edf40f6cdbc0912672e22eee"}, + {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2925720037f06e89af896c70bca73459d7e6a4be96f9de79e2d440bd499fe0db"}, + {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0cace394b6ea70c01ca1595f839cf193df35d1575986e484ad35c4aeae7266c1"}, + {file = "pandas-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:873d13d177501a28b2756375d59816c365e42ed8417b41665f346289adc68d24"}, + {file = "pandas-2.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9dfde2a0ddef507a631dc9dc4af6a9489d5e2e740e226ad426a05cabfbd7c8ef"}, + {file = "pandas-2.2.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e9b79011ff7a0f4b1d6da6a61aa1aa604fb312d6647de5bad20013682d1429ce"}, + {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cb51fe389360f3b5a4d57dbd2848a5f033350336ca3b340d1c53a1fad33bcad"}, + 
{file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee3a87076c0756de40b05c5e9a6069c035ba43e8dd71c379e68cab2c20f16ad"}, + {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3e374f59e440d4ab45ca2fffde54b81ac3834cf5ae2cdfa69c90bc03bde04d76"}, + {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32"}, + {file = "pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23"}, + {file = "pandas-2.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0ca6377b8fca51815f382bd0b697a0814c8bda55115678cbc94c30aacbb6eff2"}, + {file = "pandas-2.2.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9057e6aa78a584bc93a13f0a9bf7e753a5e9770a30b4d758b8d5f2a62a9433cd"}, + {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:001910ad31abc7bf06f49dcc903755d2f7f3a9186c0c040b827e522e9cef0863"}, + {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b479b0bd07204e37583c191535505410daa8df638fd8e75ae1b383851fe921"}, + {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a77e9d1c386196879aa5eb712e77461aaee433e54c68cf253053a73b7e49c33a"}, + {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92fd6b027924a7e178ac202cfbe25e53368db90d56872d20ffae94b96c7acc57"}, + {file = "pandas-2.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:640cef9aa381b60e296db324337a554aeeb883ead99dc8f6c18e81a93942f5f4"}, + {file = "pandas-2.2.2.tar.gz", hash = "sha256:9e79019aba43cb4fda9e4d983f8e88ca0373adbb697ae9c6c43093218de28b54"}, +] + +[package.dependencies] +numpy = [ + {version = ">=1.22.4", markers = "python_version < \"3.11\""}, + {version = ">=1.23.2", markers = "python_version == \"3.11\""}, + {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, +] +python-dateutil = ">=2.8.2" +pytz = ">=2020.1" +tzdata = ">=2022.7" + +[package.extras] +all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] +aws = ["s3fs (>=2022.11.0)"] +clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] +compression = ["zstandard (>=0.19.0)"] +computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] +consortium-standard = ["dataframe-api-compat (>=0.1.7)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] +feather = ["pyarrow (>=10.0.1)"] +fss = ["fsspec (>=2022.11.0)"] +gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] +hdf5 = ["tables (>=3.8.0)"] +html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml 
(>=4.9.2)"] +mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] +parquet = ["pyarrow (>=10.0.1)"] +performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] +plot = ["matplotlib (>=3.6.3)"] +postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] +pyarrow = ["pyarrow (>=10.0.1)"] +spss = ["pyreadstat (>=1.2.0)"] +sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] +test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.9.2)"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-gitlab" +version = "4.4.0" +description = "A python wrapper for the GitLab API" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "python-gitlab-4.4.0.tar.gz", hash = "sha256:1d117bf7b433ae8255e5d74e72c660978f50ee85eb62248c9fb52ef43c3e3814"}, + {file = "python_gitlab-4.4.0-py3-none-any.whl", hash = "sha256:cdad39d016f59664cdaad0f878f194c79cb4357630776caa9a92c1da25c8d986"}, +] + +[package.dependencies] +requests = ">=2.25.0" +requests-toolbelt = ">=0.10.1" + +[package.extras] +autocompletion = ["argcomplete (>=1.10.0,<3)"] +yaml = ["PyYaml (>=6.0.1)"] + +[[package]] +name = "pytz" +version = "2024.1" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-toolbelt" +version = "1.0.0" +description = "A utility belt for advanced users of python-requests" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"}, + {file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"}, +] + +[package.dependencies] +requests = ">=2.0.1,<3.0.0" + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "tzdata" +version = "2024.1" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +files = [ + {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, + {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, +] + +[[package]] +name = "urllib3" +version = "2.2.1" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.9" +content-hash = "8278a0a9080a212e794fe707988533614136c4f682b543ae7d4310053029cc09" diff --git a/pyproject.toml b/pyproject.toml index f99b18ac985977419c97b8c0361a31084037e0ec..b98496923839a7028eb4a998797b787fa832b7a8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,13 +1,39 @@ [tool.poetry] name = "talp-pages" -version = "0.0.1" -description = "" +version = "3.4.0-alpha1" +description = "Command line tools to generate performance reports using the TALP module of DLB" authors = ["Valentin Seitz "] +maintainers = ["Valentin Seitz "] readme = "README.md" +license = "GPL-3.0-only" +keywords = ["HPC", "Performance Analysis","Profiling","Continuous Performance Analysis"] +classifiers = [ + "Intended Audience :: Science/Research", + "Operating System :: OS Independent", + "Topic :: Scientific/Engineering :: Visualization", + "Topic :: System :: Distributed Computing" +] + +homepage = "https://pm.bsc.es/gitlab/dlb/talp-pages/" +repository= "https://pm.bsc.es/gitlab/dlb/talp-pages/" +include = [ + "talp_pages/templates/*.jinja" +] [tool.poetry.dependencies] -python = "^3.4" +python = "^3.9" +python-gitlab = "^4.4.0" +jinja2 = "^3.1.4" +pandas = "^2.2.2" + +[tool.poetry.scripts] +talp_add_to_db = 'talp_pages.talp_add_to_db:main' +talp_report = 'talp_pages.talp_report:main' +talp_badge = 'talp_pages.talp_badge:main' +talp_report_ts = 'talp_pages.talp_report_ts:main' +talp_pages = 'talp_pages.talp_pages:main' +talp_download_artifacts = 'talp_pages.download_artifacts:main' [build-system] requires = ["poetry-core"] diff --git a/talp_dlb_logo.png b/talp_dlb_logo.png new file mode 100644 index 0000000000000000000000000000000000000000..2c2717ea35fd62f643608d670c6e59877b590203 Binary files /dev/null and b/talp_dlb_logo.png differ diff --git a/talp_pages/__init__.py b/talp_pages/__init__.py old mode 100644 new mode 100755 diff --git a/talp_pages/download_artifacts.py b/talp_pages/download_artifacts.py new file mode 100755 index 0000000000000000000000000000000000000000..7b563d1860dd1374cd2ec709154f3b633295cd5c --- /dev/null +++ b/talp_pages/download_artifacts.py @@ -0,0 +1,90 @@ +#!/usr/bin/env python + +import os +import argparse +import gitlab +import logging + +""" + Script to automagically choose a suitable authentication method and download the .zip archive of the last sucessfull + execution of that jobname on the branch: ref-name + Requires python-gitlab to be installed + + Default behaviour: create empty zip file +""" + + +def download_artifacts(gl, project_name, job_name, ref_name, gitlab_token=None, output_file=None): + try: + project = gl.projects.get(project_name) + except gitlab.exceptions.GitlabGetError as e: + logging.error("Failed to get project details: %s", str(e)) + return + + logging.debug("Found project ID: %s", project.id) + + # Get artifacts for the job + try: + artifacts_file = project.artifacts.download( + ref_name=ref_name, job=job_name.strip()) + logging.info("Artifacts downloaded successfully: %s", output_file) + except Exception as e: + 
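        # Fall back to a minimal empty ZIP archive (just the 22-byte end-of-central-directory record) so downstream steps still find a file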
logging.critical( + "Wasnt able to download artifacts, continue with a empty zip directory") + artifacts_file = b'PK\x05\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + + with open(output_file, 'wb') as f: + f.write(artifacts_file) + + +def main(): + parser = argparse.ArgumentParser( + description="Download artifacts from the latest successful GitLab CI job") + parser.add_argument( + "--gitlab-url", help="GitLab URL (the thing before /api/*)", required=True) + parser.add_argument( + "--project-name", help="GitLab project name (format: namespace/project)", required=True) + parser.add_argument( + "--job-name", help="Name of the GitLab CI job from which to download the artifact", required=True) + parser.add_argument( + "--output-file", help="Output file name for downloaded artifacts", required=True) + parser.add_argument( + "--ref-name", help="GitLab branch name", default="main") + parser.add_argument( + "--gitlab-token", help="Personal GitLab access token (if not specified it will try to use a job_token)") + parser.add_argument( + "--log-level", help="Logging level (DEBUG, INFO, WARNING, ERROR)", default="INFO") + + args = parser.parse_args() + + log_level = getattr(logging, args.log_level.upper(), None) + if not isinstance(log_level, int): + raise ValueError("Invalid log level: %s" % args.log_level) + + logging.basicConfig(level=log_level) + job_gitlab_token = None + gitlab_token = None + + if args.gitlab_token: + gitlab_token = args.gitlab_token + else: + job_gitlab_token = os.getenv("CI_JOB_TOKEN") + print(job_gitlab_token) + logging.debug("Using the CI_JOB_TOKEN variant") + + gl = None + if gitlab_token: + gl = gitlab.Gitlab(args.gitlab_url, private_token=gitlab_token) + gl.auth() + elif job_gitlab_token: + gl = gitlab.Gitlab(args.gitlab_url, job_token=job_gitlab_token) + logging.debug("Using the CI_JOB_TOKEN variant") + else: + gl = gitlab.Gitlab(args.gitlab_url) + + download_artifacts(gl, args.project_name, args.job_name, + args.ref_name, args.gitlab_token, args.output_file,) + + +if __name__ == "__main__": + main() diff --git a/talp_pages/talp_add_to_db.py b/talp_pages/talp_add_to_db.py new file mode 100755 index 0000000000000000000000000000000000000000..07b3295c33a34f9b0a8139c7febf8aeef510faa8 --- /dev/null +++ b/talp_pages/talp_add_to_db.py @@ -0,0 +1,97 @@ +# myapp/app.py +import argparse +import json +import os + +from datetime import datetime +import sqlite3 +import logging +from .talp_common import TALP_TABLE_NAME, TALP_TABLE_COLUMNS_WITH_DATATYPES, TALP_TABLE_COLUMNS + + +# Function to insert data into the SQLite database +def insert_data(conn, timestamp, talp_output, metadata): + # Connect to the SQLite database + cursor = conn.cursor() + + try: + # Create a table if it doesn't exist + cursor.execute( + f"CREATE TABLE IF NOT EXISTS {TALP_TABLE_NAME} {TALP_TABLE_COLUMNS_WITH_DATATYPES}") + + # Create an index on the timestamp column + cursor.execute( + f"CREATE INDEX IF NOT EXISTS idx_timestamp ON {TALP_TABLE_NAME} (timestamp)") + + # Convert JSON objects to string format + + # Insert data into the table + cursor.execute(f"INSERT INTO {TALP_TABLE_NAME} {TALP_TABLE_COLUMNS} VALUES (?, ?, ?)", ( + timestamp, json.dumps(talp_output), json.dumps(metadata))) + + # Commit changes and close the connection + conn.commit() + logging.debug("Data inserted successfully") + except sqlite3.Error as e: + logging.critical("ERROR inserting data:", e) + finally: + # Close the connection + conn.close() + + +def main(): + # Parse command-line arguments + parser = 
argparse.ArgumentParser( + description='Add talp.json to the local time series database') + parser.add_argument('-i', '--input', dest='talp', + help='talp.json file to be added', required=True) + parser.add_argument('-m', '--metadata', dest='metadata', + help='metadata.json file to be added', required=False) + parser.add_argument('-db', '--database', dest='database', + help='TALP.db file. If not specified a new one will be generated', required=False) + # TODO add timestamp mechanism + args = parser.parse_args() + + # Check if the JSON file exists + if not os.path.exists(args.talp): + logging.error(f"The specified JSON file '{args.talp}' does not exist.") + return + + if args.metadata: + if not os.path.exists(args.metadata): + logging.error( + f"The specified JSON file '{args.metadata}' does not exist.") + return + + # Set output + if args.database: + DB_FILE = args.database + else: + DB_FILE = "TALP.db" + + # Connect to database + conn = sqlite3.connect(DB_FILE) + + current_timestamp = datetime.now() + + with open(args.talp, 'r') as json_file: + try: + talp_output = json.load(json_file) + except json.JSONDecodeError as e: + logging.error(f"Error decoding JSON: {e}") + return + if args.metadata: + with open(args.metadata, 'r') as json_file: + try: + metadata = json.load(json_file) + except json.JSONDecodeError as e: + logging.error(f"Error decoding JSON: {e}") + return + else: + metadata = {} + + insert_data(conn, current_timestamp, talp_output, metadata) + + +if __name__ == "__main__": + main() diff --git a/talp_pages/talp_badge.py b/talp_pages/talp_badge.py new file mode 100755 index 0000000000000000000000000000000000000000..c98bbcf73263031a180e0704ee0e6149a68c5027 --- /dev/null +++ b/talp_pages/talp_badge.py @@ -0,0 +1,99 @@ +#!/usr/bin/env python + +import argparse +import json +import os +from urllib.request import urlopen, Request +import logging +from .talp_common import TALP_POP_METRICS_KEY,TALP_DEFAULT_REGION_NAME + +class TalpBadge: + def __init__(self, talp_file): + # Now directly try to create a connection + with open(talp_file, 'r') as json_file: + # dont catch exception, but fail + self.raw_data = json.load(json_file) + + logging.debug(f"Created TalpReport and read the json: {self.raw_data}") + # do some sanity checks + if not self.raw_data[TALP_POP_METRICS_KEY]: + logging.error(f"No {TALP_POP_METRICS_KEY} found in {talp_file}. 
Try re-running DLB with arguments --talp --talp-summary=pop-metrics --talp-file={talp_file}") + raise Exception(f"No {TALP_POP_METRICS_KEY} found") + + def get_badge_svg(self): + + parallel_efficiency = None + pop_metric_regions = self.raw_data[TALP_POP_METRICS_KEY] + for region in pop_metric_regions: + if region['name'] == TALP_DEFAULT_REGION_NAME: + parallel_efficiency = region['parallelEfficiency'] + + + if parallel_efficiency is None: + raise Exception(f"Could not find {TALP_DEFAULT_REGION_NAME} in provided json") + + parallel_efficiency = round(parallel_efficiency, 2) + + if parallel_efficiency < 0.6: + badge_url = f"https://img.shields.io/badge/Parallel_efficiency-{parallel_efficiency}-red" + elif parallel_efficiency < 0.8: + badge_url = f"https://img.shields.io/badge/Parallel_efficiency-{parallel_efficiency}-orange" + else: + badge_url = f"https://img.shields.io/badge/Parallel_efficiency-{parallel_efficiency}-green" + + return urlopen(Request(url=badge_url, headers={'User-Agent': 'Mozilla'})).read() + + + +def _validate_inputs(args): + output_file=None + input_file=None + # Check if the JSON file exists + if not os.path.exists(args.input): + raise Exception( + f"Error: The specified JSON file '{args.input}' does not exist.") + else: + input_file= args.input + # Set output + if args.output: + output_file = args.output + if not args.output.endswith('.svg'): + output_file += ".svg" + logging.info(f"Appending .svg to '{args.output}'") + # Check if the SVG file already exists + if os.path.exists(args.output): + logging.info(f"Overwriting '{args.output}'") + else: + output_file = args.input.replace(".json", "") + output_file += ".svg" + + return output_file,input_file + +def main(): + # Parse command-line arguments + parser = argparse.ArgumentParser( + description='Render an SVG badge that can be used in pipelines, using shields.io. Internet access is therefore required') + parser.add_argument('-i', '--input', dest='input', help='Path to the TALP JSON file') + parser.add_argument('-o', '--output', dest='output', + help='Name of the svg file being generated.
If not specified, [input].svg will be chosen', required=False) + + # Parsing arguments + try: + args = parser.parse_args() + output_file, input_file = _validate_inputs(args) + except Exception as e: + logging.error(f"Encountered the following error when parsing arguments: {e}") + parser.print_help() + exit(1) + + badge = TalpBadge(input_file) + rendered_svg = badge.get_badge_svg() + with open(output_file, 'wb') as f: + f.write(rendered_svg) + + + + + +if __name__ == "__main__": + main() diff --git a/talp_pages/talp_common.py b/talp_pages/talp_common.py new file mode 100644 index 0000000000000000000000000000000000000000..e1ab1a29411242f3e0e9aa7d2bdae78a173777d3 --- /dev/null +++ b/talp_pages/talp_common.py @@ -0,0 +1,43 @@ +""" +Module declaring the globally scoped constants and helpers that our scripts rely on +""" +import pathlib +from jinja2 import Environment, FileSystemLoader +from dataclasses import dataclass + +TALP_TABLE_NAME = "talp_data" +TALP_DB_COLUMN_TALP_OUPUT = "talp_ouput" +TALP_DB_COLUMN_TIMESTAMP = "timestamp" +TALP_DB_COLUMN_METADATA = "metadata" +TALP_DEFAULT_REGION_NAME = "MPI Execution" +TALP_POP_METRICS_KEY = 'popMetrics' + +TALP_TEMPLATE_PATH = pathlib.Path(__file__).parent.joinpath('templates').resolve() +TALP_TABLE_COLUMNS_WITH_DATATYPES = f"({TALP_DB_COLUMN_TIMESTAMP} TIMESTAMP, {TALP_DB_COLUMN_TALP_OUPUT} TEXT, {TALP_DB_COLUMN_METADATA} TEXT)" +TALP_TABLE_COLUMNS = f"({TALP_DB_COLUMN_TIMESTAMP} ,{TALP_DB_COLUMN_TALP_OUPUT}, {TALP_DB_COLUMN_METADATA})" + +TALP_PAGES_REPORT_SITE = 'report.html' +TALP_PAGES_TIME_SERIES_SITE='report_ts.html' +TALP_PAGES_INDEX_SITE='index.html' +TALP_PAGES_BAGDE='parallel_effiency.svg' + +def render_template(directory, template_name, **context): + # Set up the Jinja2 environment and load the template + env = Environment(loader=FileSystemLoader(directory)) + template = env.get_template(template_name) + + # Render the template with the provided context + return template.render(context) + + +def date_time_to_string(datetime): + return datetime.strftime("%d.%m.%Y %H:%M") + + +@dataclass +class TalpRelativeLinks: + """Class to bundle the generated links in the html""" + home: str + report: str + report_ts: str + render_navbar: bool = False \ No newline at end of file diff --git a/talp_pages/talp_pages.py b/talp_pages/talp_pages.py new file mode 100755 index 0000000000000000000000000000000000000000..fd706daf8e0fc82d04a71f4212d0602d342dd406 --- /dev/null +++ b/talp_pages/talp_pages.py @@ -0,0 +1,106 @@ +#!/usr/bin/env python + +import argparse +import os +import logging + +from .talp_common import TALP_PAGES_INDEX_SITE, TALP_PAGES_REPORT_SITE, TALP_PAGES_TIME_SERIES_SITE, TALP_PAGES_BAGDE, render_template, TALP_TEMPLATE_PATH, TalpRelativeLinks +from .talp_badge import TalpBadge +from .talp_report import TalpReport +from .talp_report_ts import TalpTimeSeriesReport + +class TalpIndexPage: + + + def get_html(self,links:TalpRelativeLinks): + return render_template( + TALP_TEMPLATE_PATH, + 'talp_index_page.jinja', + links=vars(links), + ) + + +def _verify_input(args): + json_file = None + db_file = None + prefix = None + + # Check if the JSON file exists + if not os.path.exists(args.json_input): + logging.error( + f"The specified JSON file '{args.json_input}' does not exist.") + raise Exception("Input file does not exist") + else: + json_file = args.json_input + + # Check if the SQLite file exists + if not os.path.exists(args.db_input): + logging.error( + f"The specified SQLite file '{args.db_input}' does not exist.") + raise Exception("Input file does not exist")
existing input file") + else: + db_file = args.db_input + + + prefix = args.prefix + + return json_file,db_file,prefix + +def main(): + + def _add_prefix(pref,inp)->str: + if pref: + return pref + "_" + inp + else: + return inp + + # Creating the main argument parser + parser = argparse.ArgumentParser(description='Render the complete static html pages including a index page.' ) + # Adding argument for JSON file + parser.add_argument('-j', '--json', dest='json_input', help='Path to the TALP JSON file') + # Adding argument for DB file + parser.add_argument('-d', '--db', dest='db_input', help='Path to the TALP.db file') + # Adding argument for prefix + parser.add_argument('-p', '--prefix', dest='prefix', help=f"Prefix used in front of the (_){TALP_PAGES_REPORT_SITE}, (_){TALP_PAGES_TIME_SERIES_SITE} and (_){TALP_PAGES_INDEX_SITE}", required=False) + + # Parsing arguments + try: + args = parser.parse_args() + json_file,db_file,prefix = _verify_input(args) + except Exception as e: + logging.error(f"When parsing arguments ecountered the following error: {e}") + parser.print_help() + exit(1) + + output_report_ts=_add_prefix(prefix,TALP_PAGES_TIME_SERIES_SITE) + output_report=_add_prefix(prefix,TALP_PAGES_REPORT_SITE) + badge_file=_add_prefix(prefix,TALP_PAGES_BAGDE) + output_index=_add_prefix(prefix,TALP_PAGES_INDEX_SITE) + links=TalpRelativeLinks(output_index,output_report,output_report_ts,render_navbar=True) + index = TalpIndexPage() + report_ts= TalpTimeSeriesReport(db_file) + report = TalpReport(json_file) + bagde = TalpBadge(json_file) + + with open(output_index, 'w') as f: + f.write(index.get_html(links)) + + with open(output_report_ts, 'w') as f: + f.write(report_ts.get_html(links)) + + with open(output_report, 'w') as f: + f.write(report.get_html(links)) + + with open(badge_file, 'wb') as f: + f.write(bagde.get_badge_svg()) + + + diff --git a/talp_pages/talp_report.py b/talp_pages/talp_report.py new file mode 100755 index 0000000000000000000000000000000000000000..444dcae87f0c92f48f415d2cbcc1e9cc3ba8f6b0 --- /dev/null +++ b/talp_pages/talp_report.py @@ -0,0 +1,84 @@ +#!/usr/bin/env python + +import argparse +import json +import os +from urllib.request import urlopen, Request +import logging +from .talp_common import TALP_TEMPLATE_PATH,render_template,TALP_POP_METRICS_KEY,TalpRelativeLinks + +class TalpReport: + def __init__(self, talp_file): + # Now directly try to create a connection + with open(talp_file, 'r') as json_file: + # dont catch exception, but fail + self.raw_data = json.load(json_file) + + logging.debug(f"Created TalpReport and read the json: {self.raw_data}") + # do some sanity checks + if not self.raw_data[TALP_POP_METRICS_KEY]: + logging.error(f"No {TALP_POP_METRICS_KEY} found in {talp_file}. 
Try re-running DLB with arguments --talp --talp-summary=pop-metrics --talp-file={talp_file}") + raise Exception(f"No {TALP_POP_METRICS_KEY} found") + + + def get_html(self,links: TalpRelativeLinks): + pop_metric_regions = self.raw_data[TALP_POP_METRICS_KEY] + # Render the template with the data + return render_template( + TALP_TEMPLATE_PATH, 'talp_report.jinja', regions=pop_metric_regions,links=vars(links)) + + + +def _validate_inputs(args): + output_file=None + input_file=None + # Check if the JSON file exists + if not os.path.exists(args.input): + raise Exception( + f"Error: The specified JSON file '{args.input}' does not exist.") + else: + input_file= args.input + + + # Set output + if args.output: + output_file = args.output + if not args.output.endswith('.html'): + output_file += ".html" + logging.info(f"Appending .html to '{args.output}'") + # Check if the HTML file already exists + if os.path.exists(args.output): + logging.info(f"Overwriting '{args.output}'") + else: + output_file = args.input.replace(".json", "") + output_file += ".html" + + return output_file,input_file + +def main(): + # Parse command-line arguments + parser = argparse.ArgumentParser( + description='Render an HTML table summary of the talp.json') + parser.add_argument('-i', '--input', dest='input', help='Path to the TALP JSON file') + parser.add_argument('-o', '--output', dest='output', + help='Name of the html file being generated. If not specified, [input].html will be chosen', required=False) + + # Parse arguments + try: + args = parser.parse_args() + output_file, input_file = _validate_inputs(args) + except Exception as e: + logging.error(f"Encountered the following error when parsing arguments: {e}") + parser.print_help() + exit(1) + + links=TalpRelativeLinks(output_file,output_file,output_file) + report = TalpReport(input_file) + rendered_html = report.get_html(links) + with open(output_file, 'w') as f: + f.write(rendered_html) + + + +if __name__ == "__main__": + main() diff --git a/talp_pages/talp_report_ts.py b/talp_pages/talp_report_ts.py new file mode 100755 index 0000000000000000000000000000000000000000..58c97ead58a1c500e582bcb3db176a9ca3c388a7 --- /dev/null +++ b/talp_pages/talp_report_ts.py @@ -0,0 +1,161 @@ +#!/usr/bin/env python + +import argparse +import json +import pandas as pd +import os +import logging +import sqlite3 +from datetime import datetime + +from .talp_common import TALP_DB_COLUMN_METADATA, TALP_DB_COLUMN_TALP_OUPUT, TALP_DB_COLUMN_TIMESTAMP, TALP_DEFAULT_REGION_NAME, TALP_TABLE_NAME, TALP_TEMPLATE_PATH, TALP_POP_METRICS_KEY, date_time_to_string, render_template, TalpRelativeLinks + + +class TalpTimeSeriesReport: + def __init__(self, databases_file): + # Directly try to create a connection + conn = sqlite3.connect(databases_file) + + # and read the contents + self.df = pd.read_sql(f"SELECT * FROM {TALP_TABLE_NAME}", conn) + logging.debug( + f"Created TalpTimeSeriesReport and instantiated the df: {self.df}") + + def _extract_region_names_from_df(self): + region_names = set() + talp_outputs = self.df[TALP_DB_COLUMN_TALP_OUPUT].tolist() + for talp_output in talp_outputs: + raw_data = json.loads(talp_output) + for entry in raw_data[TALP_POP_METRICS_KEY]: + region_names.add(entry['name']) + return list(region_names) + + def _get_formatted_timestamps(self): + timestamps_df = self.df[TALP_DB_COLUMN_TIMESTAMP].tolist() + timestamps = [] + for timestamp in timestamps_df: + parsed_date = datetime.strptime(timestamp, "%Y-%m-%d %H:%M:%S.%f") + formatted_date = date_time_to_string(parsed_date) +
timestamps.append(formatted_date) + return timestamps + + def _extract_metadata_from_df(self): + metadata_obj = {} + timestamps = self._get_formatted_timestamps() + metadatas = self.df[TALP_DB_COLUMN_METADATA].tolist() + + for timestamp, metadata in zip(timestamps, metadatas): + metadata_obj[timestamp] = json.loads(metadata) + metadata_obj[timestamp]['date'] = timestamp + + return metadata_obj + + @staticmethod + def _pack_series_data(name, data): + return { + 'name': name, + 'type': 'line', + 'data': data} + + def _extract_dataseries(self, metric): + timestamps = self._get_formatted_timestamps() + regions = self._extract_region_names_from_df() + talp_outputs = self.df[TALP_DB_COLUMN_TALP_OUPUT].tolist() + series = [] + + for region in regions: + data = [] + for talp_output in talp_outputs: + raw_data = json.loads(talp_output) + for entry in raw_data[TALP_POP_METRICS_KEY]: + if entry['name'] == region: + try: + data.append(entry[metric]) + except KeyError: + data.append(None) + logging.debug( + "Not every timestamp has a data point for this metric, appending None") + if len(timestamps) != len(data): + logging.critical( + "Apparently not every timestamp has a data point!") + series.append(self._pack_series_data(region, data)) + return series + + def get_html(self,links: TalpRelativeLinks): + # Render the template with the data + region_names = self._extract_region_names_from_df() + metadata = self._extract_metadata_from_df() + pe_series = self._extract_dataseries('parallelEfficiency') + et_series = self._extract_dataseries('elapsedTime') + ipc_series = self._extract_dataseries('averageIPC') + timestamps = self._get_formatted_timestamps() + return render_template(TALP_TEMPLATE_PATH, 'talp_time_series.jinja', + timestamps=timestamps, + region_names=region_names, + metadata=metadata, + pe_series=pe_series, + et_series=et_series, + ipc_series=ipc_series, + default_region_name=TALP_DEFAULT_REGION_NAME, + links=vars(links)) + + +def _validate_inputs(args): + output_file = None + input_file = None + + # Check if the SQLite file exists + if not os.path.exists(args.input): + logging.error( + f"The specified SQLite file '{args.input}' does not exist.") + raise Exception("Input file does not exist") + else: + input_file = args.input + + # Set output + if args.output: + output_file = args.output + if not args.output.endswith('.html'): + output_file += ".html" + logging.info(f"Appending .html to '{args.output}'") + # Check if the HTML file already exists + if os.path.exists(args.output): + logging.info(f"Overwriting '{args.output}'") + else: + output_file = args.input.replace(".json", "") + output_file += ".html" + + return output_file, input_file + + +def main(): + # Parse command-line arguments + parser = argparse.ArgumentParser( + description='Render an HTML summary of the historic TALP data in the provided TALP database') + parser.add_argument('-i', '--input', dest='input', + help='Path to the TALP.db file') + parser.add_argument('-o', '--output', dest='output', + help='Name of the html file being generated. If not specified, [input].html will be chosen', required=False) + + # Parse arguments + try: + args = parser.parse_args() + output_file, input_file = _validate_inputs(args) + except Exception as e: + logging.error(f"Encountered the following error when parsing arguments: {e}") + parser.print_help() + exit(1) + + + links=TalpRelativeLinks(output_file,output_file,output_file) + timeseries = TalpTimeSeriesReport(input_file) + rendered_html = timeseries.get_html(links) + + # Save the rendered HTML + with open(output_file, 'w') as f: + f.write(rendered_html) + + +if __name__ == "__main__": + main() diff --git a/talp_pages/templates/talp_index_page.jinja b/talp_pages/templates/talp_index_page.jinja new file mode 100644 index 0000000000000000000000000000000000000000..3dc39b6a8c0fc85c7fcedd41470184af2c43b053 --- /dev/null +++ b/talp_pages/templates/talp_index_page.jinja @@ -0,0 +1,155 @@
[talp_index_page.jinja: a 155-line HTML/Jinja2 landing page whose markup did not survive extraction; only the visible text is recoverable. The page renders a "TALP Pages" navigation bar (shown when links.render_navbar is set), a hero section reading "TALP Pages — Easily monitor your application's performance and keep a log of performance measurements over time.", and a "Current features" section with three cards: "Performance Report" ("View performance metrics collected from the last run of your application in a table form. You also get visual highlights of regions where the metrics indicate that performance can be improved.", link "View metrics of latest run"), "Time Series Report" ("Look at the time evolution of your performance metrics, like IPC or execution time. To not overwhelm you, we provide the possibility to filter for certain regions.", link "View time evolution"), and a badge card showing example parallel-efficiency badges (0.95 and 0.44) with the text "To be able to flex your excellent performance to other groups, we provide a badge you can integrate into your CI/CD pipeline." and the link "Learn how to integrate the badge". A footer reads "BEPPP - Barcelona Supercomputing Center".]
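As a quick orientation to how the pieces above fit together, here is a minimal sketch (not part of the diff) of rendering one of these templates through the helpers in `talp_common.py`; the output file names are illustrative only.

```python
from talp_pages.talp_common import (
    TALP_TEMPLATE_PATH,
    TalpRelativeLinks,
    render_template,
)

# Relative links used by the navbar in every template; render_navbar=True
# makes the {% if links.render_navbar %} block visible.
links = TalpRelativeLinks(
    home="index.html",
    report="report.html",
    report_ts="report_ts.html",
    render_navbar=True,
)

# render_template() sets up a Jinja2 Environment over the packaged
# templates directory and renders the named template with this context.
html = render_template(TALP_TEMPLATE_PATH, "talp_index_page.jinja", links=vars(links))

with open("index.html", "w") as f:
    f.write(html)
```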
diff --git a/talp_pages/templates/talp_report.jinja b/talp_pages/templates/talp_report.jinja new file mode 100644 index 0000000000000000000000000000000000000000..89d6212250d11858aea43f8ca461d20f679f6328 --- /dev/null +++ b/talp_pages/templates/talp_report.jinja @@ -0,0 +1,197 @@
[talp_report.jinja: a 197-line HTML/Jinja2 report page whose markup did not survive extraction. It renders the "TALP Pages" navigation bar (when links.render_navbar is set) and a "Performance Report" section containing a table with the columns Name, Elapsed Time [s], Average IPC, Parallel Efficiency, Communication Efficiency, Load Balance, LB In and LB Out; one row is emitted per entry in regions, with the elapsed time converted from nanoseconds to seconds via {{ entry.elapsedTime / 1e9 }}. A "Metrics overview" card explains the cell colouring with a small legend table (Average IPC: < 1, 1–2, > 2; Parallel Efficiency: < 0.6, 0.6–0.8, > 0.8), notes "For an up-to-date overview of the computed metrics you can consult the documentation", and offers a "Show in-depth explanation" link. The footer reads "BEPPP - Barcelona Supercomputing Center".]
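The thresholds in that legend mirror the ones `talp_badge.py` uses to pick the shields.io badge colour. A small illustrative helper (not part of the package) that reproduces the classification and the nanosecond-to-second conversion used in the table:

```python
def efficiency_colour(parallel_efficiency: float) -> str:
    """Same thresholds as TalpBadge.get_badge_svg: < 0.6 red, < 0.8 orange, else green."""
    parallel_efficiency = round(parallel_efficiency, 2)
    if parallel_efficiency < 0.6:
        return "red"
    if parallel_efficiency < 0.8:
        return "orange"
    return "green"


# The TALP JSON stores elapsedTime in nanoseconds; the report divides by 1e9.
elapsed_seconds = 272555277168 / 1e9  # "MPI Execution" in the sample run -> ~272.6 s

print(efficiency_colour(0.44))  # -> "red", matching the sample parallel efficiency
```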
diff --git a/talp_pages/templates/talp_time_series.jinja b/talp_pages/templates/talp_time_series.jinja new file mode 100644 index 0000000000000000000000000000000000000000..c6eedde81924eceb98cc1d65f6896bdbba5becfa --- /dev/null +++ b/talp_pages/templates/talp_time_series.jinja @@ -0,0 +1,364 @@
[talp_time_series.jinja: a 364-line HTML/Jinja2 time-series page whose markup did not survive extraction. It renders the "TALP Pages" navigation bar (when links.render_navbar is set), a "Time Series Report" heading, a "Select Regions" filter, and one chart section each for "Elapsed Time", "Parallel Efficiency" and "Average IPC", using the timestamps, et_series, pe_series and ipc_series context passed in from talp_report_ts.py. The footer reads "BEPPP - Barcelona Supercomputing Center".]
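For reference, the series this template consumes are built by `TalpTimeSeriesReport._extract_dataseries` and `_pack_series_data` in `talp_report_ts.py`: one line series per region, aligned with the formatted timestamps. The values below are made up, but the shape is the one the code produces.

```python
# Timestamps come from date_time_to_string(), i.e. the "%d.%m.%Y %H:%M" format.
timestamps = ["01.05.2024 12:00", "02.05.2024 12:00", "03.05.2024 12:00"]

# One dict per region (as returned by _pack_series_data); runs that are
# missing a metric are padded with None by _extract_dataseries().
pe_series = [
    {"name": "MPI Execution", "type": "line", "data": [0.44, 0.47, 0.45]},
    {"name": "exchange", "type": "line", "data": [0.39, None, 0.41]},
]
```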
+ + + + + + + + + + + \ No newline at end of file diff --git a/tests/example_files/TALP.db b/tests/example_files/TALP.db new file mode 100644 index 0000000000000000000000000000000000000000..c1a51109b44330e4d22f7697b018609d8cf8a1c2 Binary files /dev/null and b/tests/example_files/TALP.db differ diff --git a/tests/example_files/latest_talp_run.json b/tests/example_files/latest_talp_run.json new file mode 100644 index 0000000000000000000000000000000000000000..198663c2b99e395309ac49f46e91db7ba6e964e7 --- /dev/null +++ b/tests/example_files/latest_talp_run.json @@ -0,0 +1 @@ +{"popMetrics": [{"name": "MPI Execution", "elapsedTime": 272555277168, "averageIPC": 1.0638629633856558, "parallelEfficiency": 0.44329474088341353, "communicationEfficiency": 0.98, "loadBalance": 1.0, "lbIn": 1.0, "lbOut": 1.0}, {"name": "add", "elapsedTime": 883643665, "averageIPC": 0.15941405179902146, "parallelEfficiency": 0.8860423190410736, "communicationEfficiency": 1.0, "loadBalance": 1.0, "lbIn": 1.0, "lbOut": 1.0}, {"name": "calc_c", "elapsedTime": 21166352588, "averageIPC": 3.4940884389457323, "parallelEfficiency": 1.395265308127606, "communicationEfficiency": 0.98, "loadBalance": 1.0, "lbIn": 1.0, "lbOut": 1.0}, {"name": "calc_a", "elapsedTime": 1383002654, "averageIPC": 0.5569558263779243, "parallelEfficiency": 1.3299091857887486, "communicationEfficiency": 1.0, "loadBalance": 1.0, "lbIn": 1.0, "lbOut": 1.0}, {"name": "calc_b", "elapsedTime": 79949502663, "averageIPC": 2.4927618935933906, "parallelEfficiency": 1.3335437400325858, "communicationEfficiency": 1.0, "loadBalance": 1.0, "lbIn": 1.0, "lbOut": 1.0}, {"name": "copy", "elapsedTime": 215464884, "averageIPC": 0.275483039959854, "parallelEfficiency": 0.9614843562313078, "communicationEfficiency": 1.0, "loadBalance": 1.0, "lbIn": 1.0, "lbOut": 1.0}, {"name": "exchange", "elapsedTime": 6553864629, "averageIPC": 2.8647142921415183, "parallelEfficiency": 0.3888434061827481, "communicationEfficiency": 0.47, "loadBalance": 1.0, "lbIn": 1.0, "lbOut": 1.0}, {"name": "file_io", "elapsedTime": 4541765341, "averageIPC": 1.994282412811448, "parallelEfficiency": 0.8074404254725357, "communicationEfficiency": 0.97, "loadBalance": 1.0, "lbIn": 1.0, "lbOut": 1.0}, {"name": "app", "elapsedTime": 242584221202, "averageIPC": 3.132834798877818, "parallelEfficiency": 1.1396645336217526, "communicationEfficiency": 0.98, "loadBalance": 1.0, "lbIn": 1.0, "lbOut": 1.0}, {"name": "initialize", "elapsedTime": 58360562981, "averageIPC": 2.365307898514589, "parallelEfficiency": 1.409373651886027, "communicationEfficiency": 1.0, "loadBalance": 1.0, "lbIn": 1.0, "lbOut": 1.0}, {"name": "initialize_mesh", "elapsedTime": 23834707590, "averageIPC": 4.09071006557, "parallelEfficiency": 0.7776872642030584, "communicationEfficiency": 1.0, "loadBalance": 1.0, "lbIn": 1.0, "lbOut": 1.0}, {"name": "initialize_timestep", "elapsedTime": 13727230220, "averageIPC": 2.170245814953007, "parallelEfficiency": 1.175461341205928, "communicationEfficiency": 1.0, "loadBalance": 1.0, "lbIn": 1.0, "lbOut": 1.0}, {"name": "lin_comb", "elapsedTime": 728435756, "averageIPC": 0.35928169976410834, "parallelEfficiency": 0.8569676104792149, "communicationEfficiency": 1.0, "loadBalance": 1.0, "lbIn": 1.0, "lbOut": 1.0}, {"name": "mpi_allreduce", "elapsedTime": 672534998, "averageIPC": 0.11803165901707928, "parallelEfficiency": 0.0, "communicationEfficiency": 0.0, "loadBalance": 1.0, "lbIn": 1.0, "lbOut": 1.0}, {"name": "calc_d", "elapsedTime": 16702225246, "averageIPC": 3.7339287805914583, "parallelEfficiency": 
0.8363065871218105, "communicationEfficiency": 1.0, "loadBalance": 1.0, "lbIn": 1.0, "lbOut": 1.0}, {"name": "calc_e", "elapsedTime": 830161999, "averageIPC": 1.5899417643702443, "parallelEfficiency": 0.756755534495032, "communicationEfficiency": 0.66, "loadBalance": 1.0, "lbIn": 1.0, "lbOut": 1.0}, {"name": "calc_f", "elapsedTime": 985913848, "averageIPC": 1.738393044198159, "parallelEfficiency": 0.8969547040338184, "communicationEfficiency": 0.77, "loadBalance": 1.0, "lbIn": 1.0, "lbOut": 1.0}, {"name": "calc_g", "elapsedTime": 1883692532, "averageIPC": 1.1260789858395248, "parallelEfficiency": 0.7129478046576753, "communicationEfficiency": 0.91, "loadBalance": 1.0, "lbIn": 1.0, "lbOut": 1.0}, {"name": "calc_h", "elapsedTime": 1900364400, "averageIPC": 0.5885122535235703, "parallelEfficiency": 0.8254250967088446, "communicationEfficiency": 1.0, "loadBalance": 1.0, "lbIn": 1.0, "lbOut": 1.0}, {"name": "calc_i", "elapsedTime": 66322685698, "averageIPC": 2.772935338543788, "parallelEfficiency": 1.0812881487475872, "communicationEfficiency": 1.0, "loadBalance": 1.0, "lbIn": 1.0, "lbOut": 1.0}, {"name": "calc_j", "elapsedTime": 11103086490, "averageIPC": 2.5385979086466888, "parallelEfficiency": 0.9634742370020535, "communicationEfficiency": 1.0, "loadBalance": 1.0, "lbIn": 1.0, "lbOut": 1.0}, {"name": "calc_k", "elapsedTime": 4947901892, "averageIPC": 3.4604996880990897, "parallelEfficiency": 1.1256651935325457, "communicationEfficiency": 1.0, "loadBalance": 1.0, "lbIn": 1.0, "lbOut": 1.0}, {"name": "calc_l", "elapsedTime": 15802114432, "averageIPC": 1.480602348333723, "parallelEfficiency": 1.2926844383641096, "communicationEfficiency": 1.0, "loadBalance": 1.0, "lbIn": 1.0, "lbOut": 1.0}]} \ No newline at end of file diff --git a/tests/test_bagde.py b/tests/test_bagde.py new file mode 100644 index 0000000000000000000000000000000000000000..8562395091e3a1edfdea472f8bc3e4ade5d15714 --- /dev/null +++ b/tests/test_bagde.py @@ -0,0 +1,17 @@ +import pathlib +from talp_pages.talp_badge import TalpBadge +TALP_JSON_EXAMPLE_DIR= pathlib.Path(__file__).parent.joinpath('example_files').resolve() + + +def test_construct(): + file=TALP_JSON_EXAMPLE_DIR / "latest_talp_run.json" + badge = TalpBadge(file) + assert badge + + +def test_get_content(): + file=TALP_JSON_EXAMPLE_DIR / "latest_talp_run.json" + badge = TalpBadge(file) + expected_effiency = str(round(0.44329474088341353, 2)) + svg=badge.get_badge_svg() + assert expected_effiency.encode() in svg diff --git a/tests/test_index_page.py b/tests/test_index_page.py new file mode 100644 index 0000000000000000000000000000000000000000..41740732f25f5874d72f1a9a2d1ebc637ef1fee9 --- /dev/null +++ b/tests/test_index_page.py @@ -0,0 +1,19 @@ + +from talp_pages.talp_pages import TalpIndexPage +from talp_pages.talp_common import TALP_PAGES_BAGDE,TALP_PAGES_TIME_SERIES_SITE,TALP_PAGES_REPORT_SITE,TalpRelativeLinks +def test_construct(): + report = TalpIndexPage() + assert report + + +def test_get_content(): + report = "rapphort.html" + time_series = "rapphort_ts.html" + badge="unparallel_eff.svg" + links=TalpRelativeLinks(report,time_series,badge,render_navbar=True) + index = TalpIndexPage() + html = index.get_html(links) + assert html + assert report in html + assert time_series in html + assert badge in html diff --git a/tests/test_report.py b/tests/test_report.py new file mode 100644 index 0000000000000000000000000000000000000000..cdde4423de73fe00cfe8c5fba06ed3786c8c9f76 --- /dev/null +++ b/tests/test_report.py @@ -0,0 +1,18 @@ +import pathlib +from 
talp_pages.talp_report import TalpReport +from talp_pages.talp_common import TalpRelativeLinks +TALP_JSON_EXAMPLE_DIR= pathlib.Path(__file__).parent.joinpath('example_files').resolve() + + +def test_construct(): + file=TALP_JSON_EXAMPLE_DIR / "latest_talp_run.json" + report = TalpReport(file) + assert report + + +def test_get_content(): + file=TALP_JSON_EXAMPLE_DIR / "latest_talp_run.json" + report = TalpReport(file) + links=TalpRelativeLinks("output_file","output_file","output_file") + html = report.get_html(links) + assert html and len(html) > 80 diff --git a/tests/test_report_ts.py b/tests/test_report_ts.py new file mode 100644 index 0000000000000000000000000000000000000000..dcecaf464f2aa87415d82f3f92b1cb1b5d4e0945 --- /dev/null +++ b/tests/test_report_ts.py @@ -0,0 +1,17 @@ +import pathlib +from talp_pages.talp_report_ts import TalpTimeSeriesReport,TalpRelativeLinks +TALP_JSON_EXAMPLE_DIR= pathlib.Path(__file__).parent.joinpath('example_files').resolve() + + +def test_construct(): + file=TALP_JSON_EXAMPLE_DIR / "TALP.db" + report = TalpTimeSeriesReport(file) + assert report + + +def test_get_content(): + file=TALP_JSON_EXAMPLE_DIR / "TALP.db" + report = TalpTimeSeriesReport(file) + links=TalpRelativeLinks("output_file","output_file","output_file") + html = report.get_html(links) + assert html and len(html) > 80
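The example files added above also make it easy to exercise the report classes outside of CI. A hedged end-to-end sketch, assuming it is run from the repository root with the package installed (`pip install talp-pages`):

```python
from talp_pages.talp_common import TalpRelativeLinks
from talp_pages.talp_report import TalpReport
from talp_pages.talp_report_ts import TalpTimeSeriesReport

links = TalpRelativeLinks("index.html", "report.html", "report_ts.html", render_navbar=True)

# Single-run report from the latest TALP JSON...
report_html = TalpReport("tests/example_files/latest_talp_run.json").get_html(links)

# ...and the historic view from the SQLite time-series database.
ts_html = TalpTimeSeriesReport("tests/example_files/TALP.db").get_html(links)

with open("report.html", "w") as f:
    f.write(report_html)
with open("report_ts.html", "w") as f:
    f.write(ts_html)
```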