Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
49 changes: 49 additions & 0 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
# Continuous-integration workflow: build the conda environment from env.yml,
# install the package in editable mode, and run the pytest suite.
name: CI - Tests

on:
  push:
    branches:
      - main
      - hug
  pull_request:
    branches:
      - main
      - hug

jobs:
  tests:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 1  # shallow clone; history is not needed for tests
          lfs: true  # pull LFS-tracked data files used by the tests

      # Best-effort LFS setup; `|| true` keeps the job going if LFS is
      # already configured on the runner.
      - name: Ensure Git LFS
        run: |
          git lfs install --local || true

      - name: Setup conda environment from env.yml
        uses: conda-incubator/setup-miniconda@v3
        with:
          environment-file: env.yml
          activate-environment: lddata
          auto-update-conda: true

      # A login shell (`bash -l {0}`) is required so the conda environment
      # created by setup-miniconda is active in this step.
      - name: Install package into the conda env
        shell: bash -l {0}
        run: |
          conda activate lddata
          python -m pip install --upgrade pip
          python -m pip install -e .
          python -m pip install pytest

      - name: Run tests
        env:
          # NOTE(review): presumably the tests fetch data from HuggingFace
          # when a token is present — confirm against the test suite.
          HF_TOKEN: ${{ secrets.HF_TOKEN }}
        shell: bash -l {0}
        run: |
          conda activate lddata
          python -m pytest -q
64 changes: 64 additions & 0 deletions .github/workflows/sync-to-huggingface.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,64 @@
# Mirror the repository's point-set data and documentation to the
# HuggingFace dataset repo whenever relevant paths change on the listed
# branches, or on manual dispatch.
name: Sync to HuggingFace Dataset

on:
  push:
    branches:
      - main
      - hug
    # Only trigger when files that are actually mirrored change.
    paths:
      - 'pregenerated_pointsets/**'
      - 'dnet/**'
      - 'lattice/**'
      - 'LICENSE.txt'
      - 'LDData Demo.ipynb'
      - 'LD_DATA.md'
      - 'README.md'
  workflow_dispatch:  # Allow manual triggering

jobs:
  sync-to-hf:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 1  # shallow clone is sufficient for an upload
          lfs: true  # data files are tracked with Git LFS

      # Best-effort LFS setup; `|| true` keeps the job going if LFS is
      # already configured on the runner.
      - name: Ensure Git LFS
        run: |
          git lfs install --local || true

      - name: Setup conda environment from env.yml
        uses: conda-incubator/setup-miniconda@v3
        with:
          environment-file: env.yml
          activate-environment: lddata
          auto-update-conda: true

      # A login shell (`bash -l {0}`) is required so the conda environment
      # created by setup-miniconda is active in this step.
      - name: Install package into the conda env
        shell: bash -l {0}
        run: |
          python -m pip install --upgrade pip
          python -m pip install -e .

      - name: Upload to HuggingFace
        env:
          HF_TOKEN: ${{ secrets.HF_TOKEN }}
        # Fix: use a login shell here too — otherwise the conda env (where
        # the package and its dependencies were just installed) is NOT
        # active, and `python upload.py` runs against the system Python.
        shell: bash -l {0}
        run: |
          python upload.py \
            --repo-id QMCSoftware/LDData \
            --local-path .

      - name: Report status
        if: success()
        run: |
          echo "✅ Successfully synchronized pregenerated_pointsets to HuggingFace dataset"
          echo "Dataset URL: https://huggingface.co/datasets/QMCSoftware/LDData"

      - name: Report failure
        if: failure()
        run: |
          echo "❌ Failed to synchronize to HuggingFace"
          echo "Check the logs above for details"
5 changes: 4 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
@@ -1,4 +1,7 @@
_ags/*
*.DS_Store
raw.githubusercontent.com/*
*.ipynb-checkpoints
*.ipynb-checkpoints
/sc
.vscode/settings.json
*.pyc
16 changes: 4 additions & 12 deletions LDData Demo.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -62,8 +62,8 @@
"name": "stderr",
"output_type": "stream",
"text": [
"/Users/agsorok/miniconda3/envs/lddata/lib/python3.13/site-packages/qmcpy/discrete_distribution/lattice/lattice.py:257\n",
"\tParameterWarning: Non-randomized lattice sequence includes the origin\n"
"/Users/terrya/miniconda3/envs/lddata/lib/python3.13/site-packages/qmcpy/discrete_distribution/lattice/lattice.py:248: ParameterWarning: Without randomization, the first lattice point is the origin\n",
" warnings.warn(\"Without randomization, the first lattice point is the origin\",ParameterWarning)\n"
]
},
{
Expand Down Expand Up @@ -130,8 +130,8 @@
"name": "stderr",
"output_type": "stream",
"text": [
"/Users/agsorok/miniconda3/envs/lddata/lib/python3.13/site-packages/qmcpy/discrete_distribution/digital_net_b2/digital_net_b2.py:389\n",
"\tParameterWarning: Non-randomized DigitalNetB2 sequence includes the origin\n"
"/Users/terrya/miniconda3/envs/lddata/lib/python3.13/site-packages/qmcpy/discrete_distribution/digital_net_b2/digital_net_b2.py:421: ParameterWarning: Without randomization, the first digtial net point is the origin\n",
" warnings.warn(\"Without randomization, the first digtial net point is the origin\",ParameterWarning)\n"
]
},
{
Expand Down Expand Up @@ -302,14 +302,6 @@
"generators = [qp.DigitalNetB2(d,randomize=False,generating_matrices=file) for file in files]\n",
"plot_extensible_projections(generators,files,n=n)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "b8ef5511",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
Expand Down
40 changes: 40 additions & 0 deletions LDData/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
"""LDData package shim.

This package file exposes the top-level `upload.py` module as
`LDData.upload` so tests and imports using `from LDData import upload`
work without moving the original script.
"""
from __future__ import annotations

import importlib.util
from pathlib import Path
import sys


def _load_top_level_module(name: str, filename: Path):
spec = importlib.util.spec_from_file_location(name, str(filename))
module = importlib.util.module_from_spec(spec)
loader = spec.loader
assert loader is not None
loader.exec_module(module)
return module


# Locate the repository root: this file lives in <root>/LDData/, so the
# root is the parent of this package directory.
_repo_root = Path(__file__).resolve().parent.parent
# Path to the existing top-level upload.py script that this shim re-exposes.
_upload_path = _repo_root / "upload.py"

if _upload_path.exists():
    # Load the top-level upload.py as a module named 'LDData.upload' so
    # `from LDData import upload` works without relocating the script.
    _mod = _load_top_level_module("LDData.upload", _upload_path)
    # Expose it in the package namespace.
    upload = _mod
    __all__ = ["upload"]
else:
    # Fallback stub for when upload.py is missing. NOTE(review): the stub
    # raises only when *called* (`upload()`); attribute access such as
    # `upload.main` raises AttributeError instead — confirm this matches
    # how tests consume the failure.
    def _missing():
        # Deferred failure so `import LDData` itself still succeeds.
        raise ImportError("upload.py not found at project root")

    upload = _missing
    __all__ = ["upload"]
Loading
Loading