
Commit b0fe66e

Merge branch 'master' into ts030325_2vbp

2 parents: 6f817d2 + 3ebb674

450 files changed (+81498 / -61290 lines)


.github/actions/prepare_environment/action.yml

Lines changed: 10 additions & 3 deletions
@@ -14,12 +14,16 @@ name: Prepare a conda environment for NNPDF installation
 # Remember that these actions are only available after checking out the repository,
 # which should always be the first step!
 # - uses: actions/checkout@v4
+#
+# This action also removes part of the cache of the runner to avoid using too much storage
+# (see at the bottom)
+# It would probably be clever to start using a different runner though TODO

 inputs:
   python-version:
     required: true
     description: "Python version to use"
-    default: "3.12"
+    default: "3.13"

 runs:
   using: "composite"
@@ -28,12 +32,15 @@ runs:
       with:
         python-version: ${{ inputs.python-version }}
         use-mamba: true
-        channels: https://packages.nnpdf.science/public,conda-forge
+        channels: conda-forge
         show-channel-urls: true
         auto-update-conda: true
         activate-environment: nnpdf_environment
         conda-remove-defaults: true
-    - name: Add NETRC
+    - name: Add NETRC and cleanup cache
       shell: bash -l {0}
       run: |
+        sudo rm -rf /opt/hostedtoolcache/Python/3.9*
+        sudo rm -rf /opt/hostedtoolcache/CodeQL
+        sudo rm -rf /usr/local/lib/android/sdk
         echo "$NETRC_FILE" | base64 --decode > ~/.netrc

.github/workflows/all_tests_nnpdf.yml

Lines changed: 38 additions & 5 deletions
@@ -21,7 +21,7 @@ jobs:
     strategy:
       matrix:
         os: [ubuntu-latest, macos-14]
-        python-version: ["3.9", "3.12"]
+        python-version: ["3.10", "3.13"]
         include:
           - os: ubuntu-latest
             CONDA_OS: linux-64
@@ -35,9 +35,9 @@ jobs:
       - name: Test n3fit and validphys
         shell: bash -l {0}
         run: |
-          pytest --cov=${PWD} --cov-config=pyproject.toml --mpl --pyargs validphys n3fit --mpl-default-tolerance 18
+          pytest --cov=${PWD} --cov-config=pyproject.toml --mpl --pyargs validphys n3fit --mpl-default-tolerance 24
       - name: Keep coverage file
-        if: startsWith(matrix.python-version, '3.12')
+        if: startsWith(matrix.python-version, '3.13')
         uses: actions/upload-artifact@v4
         with:
           include-hidden-files: true
@@ -74,7 +74,7 @@ jobs:
           fetch-depth: 0
       - uses: ./.github/actions/prepare_environment
         with:
-          python-version: "3.12"
+          python-version: "3.13"
       - name: Build recipe
         shell: bash -l {0}
         run: |
@@ -96,10 +96,13 @@ jobs:
       - uses: actions/checkout@v4
       - uses: actions/setup-python@v5
         with:
-          python-version: "3.12"
+          python-version: "3.13"
       - name: Install nnpdf without LHAPDF
         shell: bash -l {0}
         run: |
+          sudo rm -rf /opt/hostedtoolcache/Python/{3.9*}
+          sudo rm -rf /opt/hostedtoolcache/CodeQL
+          sudo rm -rf /usr/local/lib/android/sdk
           pip install .[nolha,torch]
           # Since there is no LHAPDF in the system, initialize the folder and download pdfsets.index
           lhapdf-management update --init
@@ -108,6 +111,36 @@ jobs:
         run: |
           cd n3fit/runcards/examples
           n3fit Basic_runcard.yml 4
+          cat Basic_runcard/nnfit/*/Basic_runcard.json
+      - name: Test we can still run postfit
+        shell: bash -l {0}
+        run: |
+          output=$(vp-get fit NNPDF40_nnlo_like_CI_testing_250616)
+          fit_path=$(echo $output | grep -o "PosixPath('.*')" | cut -d"'" -f2)
+          mv ${fit_path} .
+          postfit 50 NNPDF40_nnlo_like_CI_testing_250616
+
+  run_jax:
+    runs-on: ubuntu-latest
+    env:
+      KERAS_BACKEND: jax
+    steps:
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
+        with:
+          python-version: "3.13"
+      - name: Install nnpdf without LHAPDF
+        shell: bash -l {0}
+        run: |
+          pip install .[nolha,jax]
+          # Since there is no LHAPDF in the system, initialize the folder and download pdfsets.index
+          lhapdf-management update --init
+      - name: Test we can run one runcard
+        shell: bash -l {0}
+        run: |
+          cd n3fit/runcards/examples
+          n3fit Basic_runcard.yml 42
+          cat Basic_runcard/nnfit/*/Basic_runcard.json

   full_coverage:
     needs: [run_package_tests, regression_tests]

.github/workflows/fitbot.yml

Lines changed: 19 additions & 18 deletions
@@ -8,10 +8,12 @@ on:

 # some general variables
 env:
-  N3FIT_MAXNREP: 20 # total number of replicas to fit
-  POSTFIT_NREP: 16 # requested replicas for postfit
-  REFERENCE_SET: NNBOT-5684b6fe5-2025-05-12 # reference set for exact results
-  STABLE_REFERENCE_SET: NNBOT-c0f99b7b3-2024-02-28 # reference set for last tag
+  N3FIT_MAXNREP: 30 # total number of replicas to fit
+  POSTFIT_NREP: 15 # requested minimum replicas for postfit
+  # IMPORTANT
+  # WHEN CHANGING THE REFERENCE SET, THE NEW REFERENCE MUST BE MANUALLY UPLOADED TO THE SERVER
+  REFERENCE_SET: NNNBOT-1a81255f3-2026-01-27 # reference set for exact results
+  STABLE_REFERENCE_SET: NNBOT-99108504e-2025-11-22 # reference set for last tag
   PYTHONHASHSEED: "0"

 jobs:
@@ -55,12 +57,12 @@ jobs:
           cd $RUNFOLDER
           cp developing.yml $RUNCARD.yml
           vp-setupfit $RUNCARD.yml
-      # run n3fit replicas sequentially
+      # try running the n3fit replicas in parallel
       - name: Running n3fit
         shell: bash -l {0}
         run: |
           cd $RUNFOLDER
-          for ((i=1; i<=$N3FIT_MAXNREP; i+=1)); do n3fit $RUNCARD.yml $i ; done
+          n3fit $RUNCARD.yml 1 -r $N3FIT_MAXNREP
       # performing DGLAP
       - name: Running dglap
         shell: bash -l {0}
@@ -79,17 +81,16 @@ jobs:
         run: |
           conda activate nnpdfenv
           cd $RUNFOLDER
-          postfit $POSTFIT_NREP $RUNCARD
-          res=$(vp-upload $RUNCARD 2>&1)
-          echo ${res}
-          while echo ${res} | grep ERROR >/dev/null
-          do
-            sleep 30s
-            res=$(vp-upload $RUNCARD 2>&1)
-          done
-          url=$( echo "${res}" | grep https )
-          echo "FIT_URL=$url" >> $GITHUB_ENV
-      # running validphys report
+          postfit $POSTFIT_NREP $RUNCARD --at-least-nrep
+          ln -s ${PWD}/${RUNCARD} ${CONDA_PREFIX}/share/NNPDF/results
+          tar -czf ${RUNCARD}.tar.gz ${RUNCARD}
+          echo "PATH_TO_SAVE=${PWD}/${RUNCARD}.tar.gz" >> ${GITHUB_ENV}
+      - name: Keep the fit as an artifact
+        if: ${{ !cancelled() }}
+        uses: actions/upload-artifact@v4
+        with:
+          name: ${{ env.RUNCARD }}.tar.gz
+          path: ${{ env.PATH_TO_SAVE }}
       - name: Building and upload report
         shell: bash -l {0}
         run: |
@@ -121,6 +122,6 @@ jobs:
           - Fit Name: ${{ env.RUNCARD }}
           - Fit Report wrt master: ${{ env.REPORT_URL }}
           - Fit Report wrt latest stable reference: ${{ env.REPORT_URL_STABLE }}
-          - Fit Data: ${{ env.FIT_URL }}
+          - Fit Data: fit data is kept as an artifact. Please, remember to upload it to the server if the reference is changed.

           Check the report **carefully**, and please buy me a :coffee: , or better, a GPU :wink:!

.github/workflows/redo_regressions.yml

Lines changed: 0 additions & 1 deletion
@@ -34,7 +34,6 @@ jobs:
           echo "$NETRC_FILE" | base64 --decode > ~/.netrc
           conda config --remove channels defaults
           conda config --append channels conda-forge
-          conda config --prepend channels https://packages.nnpdf.science/public
           conda config --set show_channel_urls true
           conda install lhapdf pandoc
       - name: Install nnpdf with testing and qed extras

.gitignore

Lines changed: 1 addition & 0 deletions
@@ -355,6 +355,7 @@ instance/

 # Sphinx documentation
 docs/_build/
+doc/sphinx/source/theories_central.csv

 # PyBuilder
 target/

.pre-commit-config.yaml

Lines changed: 1 addition & 1 deletion
@@ -2,7 +2,7 @@
 # See https://pre-commit.com/hooks.html for more hooks
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: 'v5.0.0'
+    rev: 'v6.0.0'
     hooks:
       - id: check-merge-conflict
       - id: check-toml

README.md

Lines changed: 2 additions & 0 deletions
@@ -6,7 +6,9 @@
 [![Docs](https://github.com/NNPDF/nnpdf/actions/workflows/upload_docs.yml/badge.svg)](https://github.com/NNPDF/nnpdf/actions/workflows/upload_docs.yml)
 [![Commondata](https://github.com/NNPDF/nnpdf/actions/workflows/check_newcd.yml/badge.svg)](https://github.com/NNPDF/nnpdf/actions/workflows/check_newcd.yml)

+[![EPJC](https://img.shields.io/badge/Eur.Phys.J.C-81%20(2021)%2010-958?color=%231A43BF)](https://link.springer.com/article/10.1140/epjc/s10052-021-09747-9)
 [![DOI](https://zenodo.org/badge/118135201.svg)](https://zenodo.org/badge/latestdoi/118135201)
+[![HSF](https://hepsoftwarefoundation.org/images/HSF-logo/HSF-Affiliated.svg)](https://hepsoftwarefoundation.org/projects/projects)

 # NNPDF: An open-source machine learning framework for global analyses of parton distributions

deprecated_functions.py

Lines changed: 0 additions & 138 deletions
This file was deleted.

doc/sphinx/make_theory_csv.py

Lines changed: 2 additions & 2 deletions
@@ -5,7 +5,7 @@
 from argparse import ArgumentParser
 from pathlib import Path

-from nnpdf_data import theory_cards
+from nnpdf_data import THEORY_CARDS_PATH
 from nnpdf_data.theorydbutils import fetch_all

 if __name__ == "__main__":
@@ -20,7 +20,7 @@

     args = parser.parse_args()

-    theory_df = fetch_all(theory_cards)
+    theory_df = fetch_all(THEORY_CARDS_PATH)

     # Enforce the following order in the table:
     order = ["PTO", "QED", "Comments", "IC", "Q0", "ModEv"]

doc/sphinx/source/data/data-config.rst

Lines changed: 5 additions & 1 deletion
@@ -23,9 +23,13 @@ located in the ``nnpdf`` git repository at

 where a separate ``CommonData`` file is stored for each *Dataset* with the
 filename format described in :ref:`dataset-naming-convention`.
-The data is installed as part of the python package of ``nnpdf``,
+The data is installed as part of the python package of ``nnpdf`` or ``nnpdf_data``.
 all data files to be installed must have a ``.yaml`` extension.

+It is possible to add extra sources of data by adding a ``data_path`` variable
+to the ``nnprofile.yaml`` file (see :ref:`nnprofile`).
+The extra sources will always be searched before the default path.
+

 Theory lookup table
 ===================
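
As a rough illustration of the ``data_path`` option introduced in the documentation change above, a minimal sketch of an ``nnprofile.yaml`` entry is shown below. The file location, the directory name, and the exact value layout are illustrative assumptions, not taken from the diff; only the behaviour (extra sources searched before the default path) comes from the documentation text.

    # Sketch of an nnprofile.yaml override (assumed location, e.g. ~/.config/NNPDF/nnprofile.yaml)
    # data_path points at an extra CommonData source, searched before the default path.
    data_path: /home/user/extra_commondata   # hypothetical extra source directory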
