Skip to content
Open
Show file tree
Hide file tree
Changes from 8 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
58 changes: 58 additions & 0 deletions .github/workflows/sync-to-huggingface.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,58 @@
# Mirror selected LDData content to the HuggingFace dataset repo on every
# push to main/hug that touches the listed paths, or on manual dispatch.
name: Sync to HuggingFace Dataset

on:
  push:
    branches:
      - main
      - hug
    paths:
      - 'pregenerated_pointsets/**'
      - 'dnet/**'
      - 'lattice/**'
      - 'LICENSE.txt'
      - 'LDData Demo.ipynb'
      - 'LD_DATA.md'
      - 'README.md'
  workflow_dispatch: # Allow manual triggering

jobs:
  sync-to-hf:
    runs-on: ubuntu-latest

    steps:
      # NOTE(review): this always checks out `main`, even when the trigger
      # was a push to `hug` -- confirm that is intended.
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          ref: main
          fetch-depth: 1

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.11'

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install "huggingface_hub>=0.32.0" httpx

      # upload.py is expected at the repo root; HF_TOKEN must be configured
      # as a repository secret for the push to succeed.
      - name: Upload to HuggingFace
        env:
          HF_TOKEN: ${{ secrets.HF_TOKEN }}
        run: |
          python upload.py \
            --repo-id QMCSoftware/LDData \
            --local-path . \
            --yes

      - name: Report status
        if: success()
        run: |
          echo "✅ Successfully synchronized pregenerated_pointsets to HuggingFace dataset"
          echo "Dataset URL: https://huggingface.co/datasets/QMCSoftware/LDData"

      - name: Report failure
        if: failure()
        run: |
          echo "❌ Failed to synchronize to HuggingFace"
          echo "Check the logs above for details"
5 changes: 4 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
@@ -1,4 +1,7 @@
_ags/*
*.DS_Store
raw.githubusercontent.com/*
*.ipynb-checkpoints
*.ipynb-checkpoints
/sc
.vscode/settings.json
*.pyc
16 changes: 4 additions & 12 deletions LDData Demo.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -62,8 +62,8 @@
"name": "stderr",
"output_type": "stream",
"text": [
"/Users/agsorok/miniconda3/envs/lddata/lib/python3.13/site-packages/qmcpy/discrete_distribution/lattice/lattice.py:257\n",
"\tParameterWarning: Non-randomized lattice sequence includes the origin\n"
"/Users/terrya/miniconda3/envs/lddata/lib/python3.13/site-packages/qmcpy/discrete_distribution/lattice/lattice.py:248: ParameterWarning: Without randomization, the first lattice point is the origin\n",
" warnings.warn(\"Without randomization, the first lattice point is the origin\",ParameterWarning)\n"
]
},
{
Expand Down Expand Up @@ -130,8 +130,8 @@
"name": "stderr",
"output_type": "stream",
"text": [
"/Users/agsorok/miniconda3/envs/lddata/lib/python3.13/site-packages/qmcpy/discrete_distribution/digital_net_b2/digital_net_b2.py:389\n",
"\tParameterWarning: Non-randomized DigitalNetB2 sequence includes the origin\n"
"/Users/terrya/miniconda3/envs/lddata/lib/python3.13/site-packages/qmcpy/discrete_distribution/digital_net_b2/digital_net_b2.py:421: ParameterWarning: Without randomization, the first digtial net point is the origin\n",
" warnings.warn(\"Without randomization, the first digtial net point is the origin\",ParameterWarning)\n"
]
},
{
Expand Down Expand Up @@ -302,14 +302,6 @@
"generators = [qp.DigitalNetB2(d,randomize=False,generating_matrices=file) for file in files]\n",
"plot_extensible_projections(generators,files,n=n)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "b8ef5511",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
Expand Down
315 changes: 315 additions & 0 deletions LD_DATA.md

Large diffs are not rendered by default.

13 changes: 13 additions & 0 deletions LICENSE.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
Copyright 2021 Illinois Institute of Technology

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
409 changes: 180 additions & 229 deletions README.md

Large diffs are not rendered by default.

5 changes: 3 additions & 2 deletions env.yml
Original file line number Diff line number Diff line change
Expand Up @@ -101,8 +101,8 @@ dependencies:
- python-json-logger==3.3.0
- pyyaml==6.0.2
- pyzmq==26.3.0
- qmcpy==1.6.2
- qmctoolscl==1.1.2
- qmcpy==2.0
- qmctoolscl==1.1.5
Comment thread
sou-cheng-choi marked this conversation as resolved.
- referencing==0.36.2
- requests==2.32.3
- rfc3339-validator==0.1.4
Expand All @@ -126,3 +126,4 @@ dependencies:
- webcolors==24.11.1
- webencodings==0.5.1
- websocket-client==1.8.0
- huggingface_hub>=0.32.0
220 changes: 220 additions & 0 deletions scripts/git_lfs_upload.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,220 @@
#!/usr/bin/env bash
# git_lfs_upload.sh
#
# Automate uploading folders/files to a Hugging Face dataset repo using git + git-lfs.
# This preserves folder structure, avoids API rate limits, and handles large files.
#
# Usage:
#  1) Make executable:
#     chmod +x scripts/git_lfs_upload.sh
#  2) Run:
#     ./scripts/git_lfs_upload.sh \
#       --repo-id QMCSoftware/LDData \
#       --local-path /Users/terrya/Documents/ProgramData/LDData \
#       --folders dnet,lattice,pregenerated_pointsets,README.md \
#       --branch main
#
# Environment notes:
#  - If the repo is private, set HF_TOKEN in your environment or pass --token.
#    If HF_TOKEN is present, the script will embed it into the clone URL for non-interactive auth.
#  - Install prerequisites: git, git-lfs, rsync (macOS: `brew install git-lfs rsync`).
#  - The script commits and pushes each folder separately to reduce the size of each push.

set -euo pipefail
IFS=$'\n\t'  # restrict word-splitting to newline/tab for safer expansions

# --- Defaults; each is overridable via the CLI flags parsed below ----------
REPO_ID=""                # required: org/repo id of the HF dataset
LOCAL_PATH="."            # root of the local LDData tree to upload from
FOLDERS=""                # comma-separated folders/files; empty = whole tree
BRANCH="main"             # branch pushed to on the HF repo
HF_TOKEN="${HF_TOKEN:-}"  # auth token; read from env unless --token is given
CLONE_DIR="hf_repo"       # local directory the HF repo is cloned into
EXCLUDES=()               # rsync --exclude patterns added via --exclude
LFS_PATTERNS=("*.bin" "*.zip" "*.tar" "*.tgz" "*.h5" "*.npy" "*.npz" "*.ckpt" "*.pt" "*.pth" "*.gz")  # file types routed through git-lfs

#######################################
# Print the command-line help text.
# Globals:   none (expands $0 inside the here-doc)
# Arguments: none
# Outputs:   usage text to stdout
#######################################
print_usage() {
  cat <<EOF
Usage: $0 --repo-id ORG/Repo [options]

Options:
--repo-id <org/repo> Hugging Face dataset repo id (required)
--local-path <path> Local LDData root (default: .)
--folders <comma-list> Comma-separated folders/files to upload (e.g. dnet,lattice,README.md)
--branch <branch> Git branch to push to (default: main)
--token <hf_token> Hugging Face token (or set HF_TOKEN env var)
--clone-dir <path> Directory to clone the repo into (default: hf_repo)
--exclude <pattern> Add an rsync exclude pattern (can be supplied multiple times)
-h, --help Show this help and exit

Example:
$0 --repo-id QMCSoftware/LDData --local-path . --folders dnet,lattice,README.md

This script:
- clones the target dataset repo,
- copies the requested folders/files into the clone (preserving structure),
- enables git-lfs for common large file types,
- commits and pushes each folder/file separately to reduce push sizes.

EOF
}

# --- Argument parsing -------------------------------------------------------
# Guard helper: a value-taking option must be followed by its value.
# Without this, `set -u` aborts later with a cryptic "unbound variable"
# message instead of a usable diagnostic.
require_value() {
  local opt="$1" remaining="$2"
  if (( remaining < 2 )); then
    echo "Option $opt requires a value" >&2
    print_usage
    exit 2
  fi
}

while [[ $# -gt 0 ]]; do
  case "$1" in
    --repo-id)
      require_value "$1" "$#"; REPO_ID="$2"; shift 2;;
    --local-path)
      require_value "$1" "$#"; LOCAL_PATH="$2"; shift 2;;
    --folders)
      require_value "$1" "$#"; FOLDERS="$2"; shift 2;;
    --branch)
      require_value "$1" "$#"; BRANCH="$2"; shift 2;;
    --token)
      require_value "$1" "$#"; HF_TOKEN="$2"; shift 2;;
    --clone-dir)
      require_value "$1" "$#"; CLONE_DIR="$2"; shift 2;;
    --exclude)
      require_value "$1" "$#"; EXCLUDES+=("$2"); shift 2;;
    -h|--help)
      print_usage; exit 0;;
    *)
      echo "Unknown arg: $1" >&2; print_usage; exit 2;;
  esac
done

# --- Validate inputs --------------------------------------------------------
if [[ -z "$REPO_ID" ]]; then
  echo "--repo-id is required" >&2
  print_usage
  exit 2
fi

# Fail with a clear message before the `cd` in the substitution below can
# abort the script (via set -e) with only a cryptic shell error.
if [[ ! -d "$LOCAL_PATH" ]]; then
  echo "--local-path does not exist or is not a directory: $LOCAL_PATH" >&2
  exit 2
fi

# Normalize LOCAL_PATH to an absolute path
LOCAL_PATH=$(cd "$LOCAL_PATH" && pwd)

echo "Repo ID: $REPO_ID"
echo "Local path: $LOCAL_PATH"
echo "Folders: $FOLDERS"
echo "Branch: $BRANCH"

# --- Dependency checks ------------------------------------------------------
command -v git >/dev/null 2>&1 || { echo "git not found; install git." >&2; exit 1; }
command -v rsync >/dev/null 2>&1 || { echo "rsync not found; install rsync." >&2; exit 1; }
if ! command -v git-lfs >/dev/null 2>&1; then
  echo "git-lfs not found; installing is recommended. Please install git-lfs and run 'git lfs install'." >&2
  echo "On macOS: brew install git-lfs && git lfs install" >&2
  # -r keeps backslashes literal (SC2162). `|| c=""` prevents a closed or
  # non-interactive stdin (e.g. CI) from killing the script via `set -e`
  # mid-prompt; EOF is treated as a "no" answer instead.
  read -r -p "Continue without git-lfs? [y/N]: " c || c=""
  if [[ "$c" != "y" && "$c" != "Y" ]]; then
    exit 1
  fi
fi

# --- Prepare clone URL and clone --------------------------------------------
if [[ -n "$HF_TOKEN" ]]; then
  # Embed token for non-interactive auth.
  # NOTE: the token ends up in the clone's .git/config and may be visible in
  # the process list while git runs; the clone dir is recreated each run.
  CLONE_URL="https://${HF_TOKEN}@huggingface.co/datasets/${REPO_ID}.git"
else
  CLONE_URL="https://huggingface.co/datasets/${REPO_ID}.git"
fi
# BUGFIX: never echo CLONE_URL itself -- when HF_TOKEN is set it contains the
# secret, and the previous `echo "Cloning ${CLONE_URL}"` leaked it into logs.
DISPLAY_URL="https://huggingface.co/datasets/${REPO_ID}.git"

# Clone the repo (a fresh clone each run keeps state predictable)
if [[ -d "$CLONE_DIR" ]]; then
  echo "Removing existing clone dir $CLONE_DIR"
  rm -rf "$CLONE_DIR"
fi

echo "Cloning ${DISPLAY_URL} -> ${CLONE_DIR}"
if ! git clone --depth 1 --branch "$BRANCH" "$CLONE_URL" "$CLONE_DIR"; then
  echo "Initial clone failed; trying full clone (no depth)"
  git clone --branch "$BRANCH" "$CLONE_URL" "$CLONE_DIR"
fi

pushd "$CLONE_DIR" >/dev/null

# A committer identity must exist or `git commit` below will refuse to run;
# fill in placeholders only when nothing is configured.
git config user.email >/dev/null || git config user.email "uploader@example.com"
git config user.name >/dev/null || git config user.name "LDData uploader"

# Create the target branch locally, or reset it to the current HEAD.
git checkout -B "$BRANCH"

# --- git-lfs tracking & rsync exclude args ----------------------------------
# Register LFS patterns so large binaries are stored via git-lfs (skipped when
# git-lfs is unavailable; the user opted in to that above).
if command -v git-lfs >/dev/null 2>&1; then
  echo "Configuring git-lfs patterns: ${LFS_PATTERNS[*]}"
  for pat in "${LFS_PATTERNS[@]}"; do
    # BUGFIX: `--no-update` is not a valid `git lfs track` option; combined
    # with `|| true` it silently skipped tracking entirely.
    git lfs track "$pat" || true
  done
  # Commit the updated .gitattributes so tracking is effective for the pushes.
  git add .gitattributes || true
  git commit -m "Add git-lfs tracking patterns" --allow-empty || true
fi

# Build the rsync exclude arguments.
# The length guard matters: with `set -u`, bash < 4.4 (e.g. macOS's stock
# bash 3.2) treats "${EXCLUDES[@]}" on an empty array as an unbound variable.
RSYNC_EXCLUDE_ARGS=()
if (( ${#EXCLUDES[@]} > 0 )); then
  for ex in "${EXCLUDES[@]}"; do
    RSYNC_EXCLUDE_ARGS+=(--exclude "$ex")
  done
fi

#######################################
# Copy one folder or file from the local tree into the clone, preserving its
# relative path (e.g. item "a/b" lands at ./a/b, not ./b).
# Globals:   LOCAL_PATH (read), RSYNC_EXCLUDE_ARGS (read)
# Arguments: $1 - item path relative to $LOCAL_PATH
# Outputs:   progress / warning messages to stdout
#######################################
copy_item() {
  local item="$1"
  local parent
  parent=$(dirname "$item")   # "." for top-level items
  echo "Processing: $item"
  if [[ -d "$LOCAL_PATH/$item" ]]; then
    mkdir -p "$parent"
    # BUGFIX: rsync into the item's parent directory instead of the repo
    # root, so nested items keep their structure (the old `./` destination
    # dropped intermediate directories). The `+`-guarded expansion keeps an
    # empty exclude list from tripping `set -u` on bash < 4.4.
    rsync -av --delete ${RSYNC_EXCLUDE_ARGS[@]+"${RSYNC_EXCLUDE_ARGS[@]}"} "$LOCAL_PATH/$item" "$parent/"
  elif [[ -f "$LOCAL_PATH/$item" ]]; then
    mkdir -p "$parent"
    rsync -av ${RSYNC_EXCLUDE_ARGS[@]+"${RSYNC_EXCLUDE_ARGS[@]}"} "$LOCAL_PATH/$item" "$parent/"
  else
    echo "Warning: $item not found in $LOCAL_PATH; skipping"
  fi
}

#######################################
# Stage, commit and push a single path (folder or file).
# Globals:   BRANCH (read)
# Arguments: $1 - path to stage/commit
# Outputs:   progress messages to stdout
#######################################
commit_and_push() {
  local target="$1"

  git add --all "$target" || true

  # Skip the commit/push round-trip when staging produced no changes.
  if ! git diff --staged --quiet; then
    git commit -m "Upload $target" || true
    echo "Pushing $target to origin/$BRANCH"
    git push origin "$BRANCH"
  else
    echo "No changes staged for $target"
  fi
}

# --- Main upload loop -------------------------------------------------------
if [[ -z "$FOLDERS" ]]; then
  # No explicit list: mirror the whole local tree (respecting excludes).
  echo "No --folders provided; copying whole local tree (respecting excludes)."
  rsync -av --delete ${RSYNC_EXCLUDE_ARGS[@]+"${RSYNC_EXCLUDE_ARGS[@]}"} "$LOCAL_PATH/" ./
  commit_and_push "."
else
  # Iterate the comma-separated list, pushing each item separately.
  IFS=',' read -ra ITEMS <<< "$FOLDERS"
  for it in "${ITEMS[@]}"; do
    # Trim surrounding whitespace with pure bash.
    # BUGFIX: the previous `sed 's/^\s*//;s/\s*$//'` relied on GNU sed's
    # `\s`; BSD sed (macOS) reads `\s` as a literal "s" and stripped real
    # characters from item names (e.g. "scripts" -> "cript").
    it_trimmed="${it#"${it%%[![:space:]]*}"}"
    it_trimmed="${it_trimmed%"${it_trimmed##*[![:space:]]}"}"
    if [[ -z "$it_trimmed" ]]; then
      continue
    fi
    copy_item "$it_trimmed"
    commit_and_push "$it_trimmed"
    # pause between folder pushes to avoid network bursts
    sleep 3
  done
fi

# --- Finalize ---------------------------------------------------------------
# Sweep up anything the per-item pushes above did not cover.
git add --all || true
git diff --staged --quiet || {
  git commit -m "Upload remaining files" || true
  git push origin "$BRANCH"
}

popd >/dev/null

echo "Upload complete. Repository at: https://huggingface.co/datasets/${REPO_ID}"

# End of script
Loading
Loading