bug fix for weekly summary clip collection from S3 #145

Merged 1 commit on Feb 13, 2025

pyproject.toml (163 changes: 80 additions & 83 deletions)
@@ -1,113 +1,110 @@
 [build-system]
-requires = ["setuptools>=64", "setuptools-scm[toml]>=6.2", "wheel"]
-build-backend = "setuptools.build_meta"
+build-backend = "setuptools.build_meta"
+requires = ["setuptools-scm[toml]>=6.2", "setuptools>=64", "wheel"]

 [project]
-name = "offsets-db-api"
-description = "fastAPI for offsets Database"
-readme = "README.md"
-license = { text = "MIT" }
-authors = [{ name = "CarbonPlan", email = "tech@carbonplan.org" }]
-requires-python = ">=3.10"
-classifiers = [
-  "Development Status :: 4 - Beta",
-  "License :: OSI Approved :: MIT License",
-  "Operating System :: OS Independent",
-  "Intended Audience :: Science/Research",
-  "Programming Language :: Python",
-  "Programming Language :: Python :: 3",
-  "Programming Language :: Python :: 3.10",
-  "Topic :: Scientific/Engineering",
-]

-dynamic = ["version", "dependencies"]
+authors = [{ name = "CarbonPlan", email = "tech@carbonplan.org" }]
+classifiers = [
+  "Development Status :: 4 - Beta",
+  "Intended Audience :: Science/Research",
+  "License :: OSI Approved :: MIT License",
+  "Operating System :: OS Independent",
+  "Programming Language :: Python :: 3",
+  "Programming Language :: Python :: 3.10",
+  "Programming Language :: Python",
+  "Topic :: Scientific/Engineering",
+]
+description = "fastAPI for offsets Database"
+license = { text = "MIT" }
+name = "offsets-db-api"
+readme = "README.md"
+requires-python = ">=3.10"

+dynamic = ["dependencies", "version"]

 [tool.setuptools.dynamic]

-dependencies = { file = ["requirements.txt"] }
-optional-dependencies = { dev = { file = ["requirements-dev.txt"] } }
+dependencies = { file = ["requirements.txt"] }
+optional-dependencies = { dev = { file = ["requirements-dev.txt"] } }

 [project.urls]
-repository = "https://github.com/carbonplan/offsets-db-api"
+repository = "https://github.com/carbonplan/offsets-db-api"

 [tool.setuptools.packages.find]
-include = ["offsets_db_api*"]
+include = ["offsets_db_api*"]

 [tool.setuptools_scm]
-version_scheme = "post-release"
-local_scheme = "node-and-date"
-fallback_version = "999"
+fallback_version = "999"
+local_scheme = "node-and-date"
+version_scheme = "post-release"

 [tool.coverage.run]
-omit = ["tests/*", "gunicorn_config.py"]
-branch = true
+branch = true
+omit = ["gunicorn_config.py", "tests/*"]

 [tool.ruff]
-line-length = 100
-target-version = "py310"
-builtins = ["ellipsis"]
-# Exclude a variety of commonly ignored directories.
-exclude = [
-  ".bzr",
-  ".direnv",
-  ".eggs",
-  ".git",
-  ".hg",
-  ".mypy_cache",
-  ".nox",
-  ".pants.d",
-  ".ruff_cache",
-  ".svn",
-  ".tox",
-  ".venv",
-  "__pypackages__",
-  "_build",
-  "buck-out",
-  "build",
-  "dist",
-  "node_modules",
-  "venv",
-]
+builtins = ["ellipsis"]
+line-length = 100
+target-version = "py310"
+# Exclude a variety of commonly ignored directories.
+exclude = [
+  ".bzr",
+  ".direnv",
+  ".eggs",
+  ".git",
+  ".hg",
+  ".mypy_cache",
+  ".nox",
+  ".pants.d",
+  ".ruff_cache",
+  ".svn",
+  ".tox",
+  ".venv",
+  "__pypackages__",
+  "_build",
+  "buck-out",
+  "build",
+  "dist",
+  "node_modules",
+  "venv",
+]

 [tool.ruff.lint]
-per-file-ignores = {}
-ignore = [
-  "E721", # Comparing types instead of isinstance
-  "E741", # Ambiguous variable names
-  "E501", # Conflicts with ruff format
-]
-select = [
-  # Pyflakes
-  "F",
-  # Pycodestyle
-  "E",
-  "W",
-  # isort
-  "I",
-  # Pyupgrade
-  "UP",
-]
+ignore = [
+  "E501", # Conflicts with ruff format
+  "E721", # Comparing types instead of isinstance
+  "E741", # Ambiguous variable names
+]
+per-file-ignores = {}
+select = [
+  # Pyflakes
+  "F",
+  # Pycodestyle
+  "E",
+  "W",
+  # isort
+  "I",
+  # Pyupgrade
+  "UP",
+]

 [tool.ruff.lint.mccabe]
-max-complexity = 18
+max-complexity = 18

 [tool.ruff.lint.isort]
-known-first-party = ["offsets_db_api"]
-combine-as-imports = true
+combine-as-imports = true
+known-first-party = ["offsets_db_api"]

 [tool.ruff.format]
-quote-style = "single"
+quote-style = "single"

 [tool.ruff.lint.pydocstyle]
-convention = "numpy"
+convention = "numpy"

 [tool.ruff.lint.pyupgrade]
-# Preserve types, even if a file imports `from __future__ import annotations`.
-keep-runtime-typing = true
+# Preserve types, even if a file imports `from __future__ import annotations`.
+keep-runtime-typing = true

 [tool.pytest.ini_options]
-console_output_style = "count"
-addopts = "--cov=./ --cov-report=xml --cov-report=term-missing --verbose"
+addopts = "--cov=./ --cov-report=xml --cov-report=term-missing --verbose"
+console_output_style = "count"
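The pyproject.toml hunk is housekeeping: keys within each table, and the entries of simple string arrays such as classifiers and requires, are re-sorted alphabetically, with no values added or removed. A standalone way to confirm that a reordering like this is purely cosmetic is to parse both versions and compare them with order ignored (a sketch, not part of the PR; the pyproject.toml.orig filename and the normalize helper are hypothetical):

import tomllib  # Python 3.11+ stdlib; on 3.10, `import tomli as tomllib` behaves the same


def normalize(obj):
    if isinstance(obj, dict):
        # Dict comparison already ignores key order; just recurse into values.
        return {key: normalize(value) for key, value in obj.items()}
    if isinstance(obj, list):
        items = [normalize(item) for item in obj]
        # Element order in the re-sorted string arrays (classifiers, requires,
        # dynamic, omit, ...) carries no meaning, so sort those before comparing.
        if all(isinstance(item, str) for item in items):
            return sorted(items)
        return items
    return obj


with open('pyproject.toml.orig', 'rb') as f:  # hypothetical copy of the old file
    before = tomllib.load(f)
with open('pyproject.toml', 'rb') as f:
    after = tomllib.load(f)

assert normalize(before) == normalize(after), 'more than a reordering changed'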
update_database.py (7 changes: 5 additions & 2 deletions)
@@ -9,7 +9,7 @@


 def generate_path(*, date: datetime.date, bucket: str, category: str) -> str:
-    return f"{bucket.rstrip('/')}/final/{date.strftime('%Y-%m-%d')}/{category}.parquet"
+    return f'{bucket.rstrip("/")}/final/{date.strftime("%Y-%m-%d")}/{category}.parquet'


 def calculate_date(*, days_back: int) -> datetime.date:
@@ -50,14 +50,17 @@ def get_latest(*, bucket: str):
     added_weeks = set()

     for entry in date_ranges:
-        week_num = entry.isocalendar()[1]
+        value = entry.isocalendar()
+        week_num = f'{value.year}-{value.week}'
         if week_num not in added_weeks:
             weekly_summary_path = generate_path(
                 date=entry.date(), bucket=bucket, category='weekly-summary-clips'
             )
             if fs.exists(weekly_summary_path):
                 data.append({'category': 'clips', 'url': weekly_summary_path})
                 added_weeks.add(week_num)
+            else:
+                print(f"weekly summary path {weekly_summary_path} doesn't exist")

     return data

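The substantive fix is in get_latest: the old dedup key, entry.isocalendar()[1], is just the ISO week number, so a week-number already seen in one year caused the same-numbered week of a later year to be treated as a duplicate and its weekly-summary clips to be skipped; the new key combines ISO year and week. (The generate_path change only swaps the f-string quote style, matching the ruff quote-style = "single" setting.) A standalone sketch of the collision, not part of the PR:

import datetime

# Two dates a year apart that land in the same ISO week number (week 7).
d_2024 = datetime.date(2024, 2, 14)
d_2025 = datetime.date(2025, 2, 12)

# Old key: week number only -- both dates collapse to the same value, so the
# later year's weekly summary would be skipped as an apparent duplicate.
assert d_2024.isocalendar()[1] == d_2025.isocalendar()[1] == 7

# New key: ISO year and week together -- the two weeks stay distinct.
keys = {f'{d.isocalendar().year}-{d.isocalendar().week}' for d in (d_2024, d_2025)}
print(sorted(keys))  # ['2024-7', '2025-7']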