Merged
Changes from all commits

Commits (25)
47a6d07 ci: add release helper (sgratzl, Jun 17, 2021)
2a2987b ci: add more label categories to release drafter (sgratzl, Jun 17, 2021)
03f5fdd ci: rename secrets (sgratzl, Jun 18, 2021)
e15a2f5 Merge pull request #605 from cmu-delphi/main (krivard, Jun 21, 2021)
7689b8c Merge branch 'dev' into sgratzl/release_helper (sgratzl, Jun 22, 2021)
81951e0 Merge pull request #600 from cmu-delphi/sgratzl/release_helper (krivard, Jun 22, 2021)
b34d5dd initial changes, not done yet (Jun 24, 2021)
899bab3 modified others print(), pass the integration / unit test (zhuoran-Cheng16, Jun 24, 2021)
9fd0d0c fix the part of compute_covidcast_meta (zhuoran-Cheng16, Jun 24, 2021)
fe78330 modified the exception part (zhuoran-Cheng16, Jun 24, 2021)
84bd4d6 Update of the csv_importer, csv_to_databse (zhuoran-Cheng16, Jun 24, 2021)
91ed202 whitespace edits (Jun 24, 2021)
f1f72b0 delete white space (zhuoran-Cheng16, Jun 24, 2021)
567cd03 Merge branch 'covidcast-logging' of https://github.com/cmu-delphi/del… (zhuoran-Cheng16, Jun 24, 2021)
8c5d53c delete white space (zhuoran-Cheng16, Jun 24, 2021)
af2d10a modified the problem included structure logging of warning (zhuoran-Cheng16, Jun 25, 2021)
56aa100 add the argument logger to func handle_faild/handle_successful (zhuoran-Cheng16, Jun 28, 2021)
3f5efbd update (zhuoran-Cheng16, Jun 28, 2021)
dd0aa56 modified some structure logging (zhuoran-Cheng16, Jun 29, 2021)
2447452 delete some unneccesary part of logger (zhuoran-Cheng16, Jun 29, 2021)
5b7ae88 delete some wrong typing (zhuoran-Cheng16, Jun 29, 2021)
dce532a Merge pull request #611 from cmu-delphi/covidcast-logging (krivard, Jun 29, 2021)
13abc7b Add documentation for 7dav HHS signals (chinandrew, Jun 29, 2021)
ca5c781 Merge pull request #615 from cmu-delphi/hhs-docs (krivard, Jun 29, 2021)
2c15d31 chore: release delphi-epidata 0.1.2 (krivard, Jun 30, 2021)
2 changes: 1 addition & 1 deletion .bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 0.1.1
+current_version = 0.1.2
 commit = False
 tag = False
31 changes: 31 additions & 0 deletions .github/release-drafter.yml
@@ -0,0 +1,31 @@
+name-template: "v$RESOLVED_VERSION"
+tag-template: "v$RESOLVED_VERSION"
+categories:
+  - title: "🚀 API Changes"
+    labels:
+      - "api change"
+  - title: "🚀 Python Client Changes"
+    labels:
+      - "python client"
+  - title: "🚀 R Client Changes"
+    labels:
+      - "r client"
+  - title: "🚀 JavaScript Client Changes"
+    labels:
+      - "js client"
+      - "javascript"
+  - title: "📕 Documentation"
+    labels:
+      - "documentation"
+  - title: "🧰 Development"
+    labels:
+      - "chore"
+      - "documentation"
+      - "dependencies"
+      - "acquisition"
+change-template: "- #$NUMBER $TITLE"
+change-title-escapes: '\<*_&`#@'
+template: |
+  $CHANGES
+
+  Thanks to $CONTRIBUTORS
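
For orientation, release-drafter groups each merged PR under the first category whose label matches, renders category titles with its default "## $TITLE" heading, and substitutes $CHANGES and $CONTRIBUTORS into the template. A drafted release body would therefore render roughly like the following (PR numbers and titles are invented for illustration):

## 🚀 API Changes

- #123 add a new endpoint parameter

## 📕 Documentation

- #124 document the new parameter

Thanks to @contributor-a and @contributor-b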
2 changes: 1 addition & 1 deletion .github/workflows/create-release.yml
@@ -1,4 +1,4 @@
-name: Create Delphi Epidata Release
+name: Create Release
 
 on:
   workflow_dispatch:
153 changes: 153 additions & 0 deletions .github/workflows/release-helper.yml
@@ -0,0 +1,153 @@
+name: Release Helper
+
+on:
+  push:
+    branches:
+      - main
+
+jobs:
+  correct_repository:
+    runs-on: ubuntu-latest
+    steps:
+      - name: fail on fork
+        if: github.repository_owner != 'cmu-delphi'
+        run: exit 1
+
+  create_release:
+    needs: correct_repository
+    runs-on: ubuntu-latest
+    steps:
+      - name: Check out code
+        uses: actions/checkout@v2
+        with:
+          ssh-key: ${{ secrets.CMU_DELPHI_DEPLOY_MACHINE_SSH }}
+      - name: Set up Python 3.8
+        uses: actions/setup-python@v2
+        with:
+          python-version: 3.8
+      - name: Extract version
+        id: extract_version
+        run: |
+          python -m pip install bump2version
+          echo -n "::set-output name=version::"
+          bump2version --dry-run --list patch | grep ^current_version | sed -r s,"^.*=",,
+      - name: Create Release
+        id: create_release
+        uses: release-drafter/release-drafter@v5
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        with:
+          version: ${{ steps.extract_version.outputs.version }}
+          publish: true
+    outputs:
+      version: ${{ steps.extract_version.outputs.version }}
+      upload_url: ${{ steps.create_release.outputs.upload_url }}
+      tag_name: ${{ steps.create_release.outputs.tag_name }}
+
+  release_python_client:
+    needs: create_release
+    runs-on: ubuntu-latest
+    steps:
+      - name: Check out code
+        uses: actions/checkout@v2
+      - name: Set up Python 3.8
+        uses: actions/setup-python@v2
+        with:
+          python-version: 3.8
+      - name: Install build dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install wheel twine
+      - name: Prepare package
+        run: |
+          cp src/client/*.py src/client/packaging/pypi/delphi_epidata/
+      - name: Create release
+        working-directory: src/client/packaging/pypi
+        run: |
+          python setup.py sdist bdist_wheel
+      - uses: actions/upload-artifact@v2
+        with:
+          name: delphi_epidata_py
+          path: src/client/packaging/pypi/dist/*.tar.gz
+      - name: Upload Release Asset
+        uses: AButler/[email protected]
+        with:
+          files: "src/client/packaging/pypi/dist/*.tar.gz"
+          repo-token: ${{ secrets.GITHUB_TOKEN }}
+          release-tag: ${{ needs.create_release.outputs.tag_name }}
+      - name: Publish a Python distribution to PyPI
+        uses: pypa/gh-action-pypi-publish@release/v1
+        with:
+          user: __token__
+          password: ${{ secrets.DELPHI_PYPI_PROD_TOKEN }}
+          packages_dir: src/client/packaging/pypi/dist/
+          skip_existing: true
+          # repository_url: https://test.pypi.org/legacy/
+
+  release_js_client:
+    needs: create_release
+    runs-on: ubuntu-latest
+    defaults:
+      run:
+        working-directory: src/client/packaging/npm
+    steps:
+      - name: Check out code
+        uses: actions/checkout@v2
+      - uses: actions/setup-node@v2
+        with:
+          node-version: '14.x'
+      - name: Cache Node.js modules
+        uses: actions/cache@v2
+        with:
+          path: ~/.npm # npm cache files are stored in `~/.npm` on Linux/macOS
+          key: ${{ runner.OS }}-node2-${{ hashFiles('**/package-lock.json') }}
+          restore-keys: |
+            ${{ runner.OS }}-node2-
+      - run: npm ci
+      - run: npm test
+      - run: npm pack
+      - name: Rename to a different name
+        run: for f in *.tgz; do mv "$f" "$(echo "$f" | sed s/delphi_epidata-/delphi_epidata_js-/)"; done
+      - uses: actions/upload-artifact@v2
+        with:
+          name: delphi_epidata_js
+          path: src/client/packaging/npm/*.tgz
+      - name: Upload Release Asset
+        uses: AButler/[email protected]
+        with:
+          files: "src/client/packaging/npm/*.tgz"
+          repo-token: ${{ secrets.GITHUB_TOKEN }}
+          release-tag: ${{ needs.create_release.outputs.tag_name }}
+      - name: Publish to NPM
+        uses: JS-DevTools/npm-publish@v1
+        with:
+          token: ${{ secrets.DELPHI_NPM_TOKEN }}
+          package: src/client/packaging/npm/package.json
+          access: public
+          check-version: true
+
+  sync_dev:
+    needs: correct_repository
+    runs-on: ubuntu-latest
+    steps:
+      - name: Check out code
+        uses: actions/checkout@v2
+        with:
+          ref: dev
+          ssh-key: ${{ secrets.CMU_DELPHI_DEPLOY_MACHINE_SSH }}
+      - name: Reset dev branch
+        run: |
+          git fetch origin main:main
+          git reset --hard main
+      - name: Create pull request into dev
+        uses: peter-evans/create-pull-request@v3
+        with:
+          branch: bot/sync-main-dev
+          commit-message: "chore: sync main-dev"
+          base: dev
+          title: "chore: sync main->dev"
+          labels: chore
+          reviewers: krivard
+          assignees: krivard
+          body: |
+            Syncing Main->Dev.
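
The Extract version step above relies on bump2version's --list output, which prints key=value pairs such as current_version=0.1.2. As a rough Python sketch of the same grep/sed parse (illustrative only, not part of this PR; the function name is made up):

import subprocess

def extract_current_version() -> str:
  """Replicate: bump2version --dry-run --list patch | grep ^current_version | sed -r s,"^.*=",,"""
  out = subprocess.run(
    ["bump2version", "--dry-run", "--list", "patch"],
    capture_output=True, text=True, check=True,
  ).stdout
  for line in out.splitlines():
    if line.startswith("current_version"):
      # everything after the first '=' is the version string
      return line.split("=", 1)[1]
  raise RuntimeError("bump2version did not report current_version")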
12 changes: 8 additions & 4 deletions docs/api/covidcast-signals/hhs.md
@@ -30,10 +30,14 @@ the sum of all adult and pediatric COVID-19 hospital admissions. This
 sum is used as the "ground truth" for hospitalizations by the [COVID-19 Forecast Hub](https://github.com/reichlab/covid19-forecast-hub/blob/master/data-processed/README.md#hospitalizations).
 
 
-| Signal | Geography | Resolution | Description |
-| --- | --- | --- | --- |
-| `confirmed_admissions_covid_1d` | state | 1 day | Sum of adult and pediatric confirmed COVID-19 hospital admissions occurring each day. <br/> **Earliest date available:** 2019-12-31 |
-| `sum_confirmed_suspected_admissions_covid_1d` | state | 1 day | Sum of adult and pediatric confirmed and suspected COVID-19 hospital admissions occurring each day. <br/> **Earliest date available:** 2019-12-31 |
+| Signal | 7-day average signal | Geography | Resolution | Description |
+| --- | --- | --- | --- | --- |
+| `confirmed_admissions_covid_1d` | `confirmed_admissions_covid_1d_7dav` | state | 1 day | Sum of adult and pediatric confirmed COVID-19 hospital admissions occurring each day. <br/> **Earliest date available:** 2019-12-31 |
+| `sum_confirmed_suspected_admissions_covid_1d` | `sum_confirmed_suspected_admissions_covid_1d_7dav` | state | 1 day | Sum of adult and pediatric confirmed and suspected COVID-19 hospital admissions occurring each day. <br/> **Earliest date available:** 2019-12-31 |
 
+The 7-day average signals are computed by Delphi by calculating
+moving averages of the preceding 7 days, so e.g. the signal for June 7 is the
+average of the underlying data for June 1 through 7, inclusive.
+
 ## Table of contents
 {: .no_toc .text-delta}
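
To make the averaging rule concrete, here is a minimal sketch of a trailing 7-day mean, assuming pandas; the dates and values are invented, and this is not Delphi's production code:

import pandas as pd

# Daily admissions for one state; values are made up for illustration.
daily = pd.Series(
  [5, 7, 6, 9, 8, 10, 11, 12],
  index=pd.date_range("2021-06-01", periods=8, freq="D"),
)

# Trailing window: the June 7 value averages June 1 through June 7 inclusive.
seven_day_avg = daily.rolling(window=7).mean()
print(seven_day_avg.loc["2021-06-07"])  # (5+7+6+9+8+10+11)/7 = 8.0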
6 changes: 3 additions & 3 deletions src/acquisition/covidcast/covidcast_meta_cache_updater.py
@@ -48,18 +48,18 @@ def main(args, epidata_impl=Epidata, database_impl=Database):
   if len(metadata)==0:
     args = ("no results",-2)
 
-  print('covidcast_meta result: %s (code %d)' % args)
+  logger.info('covidcast_meta result: %s (code %d)' % args)
 
   if args[-1] != 1:
-    print('unable to cache epidata')
+    logger.error('unable to cache epidata')
     return False
 
   # update the cache
   try:
     metadata_update_start_time = time.time()
     database.update_covidcast_meta_cache(metadata)
     metadata_update_interval_in_seconds = time.time() - metadata_update_start_time
-    print('successfully cached epidata')
+    logger.info('successfully cached epidata')
   finally:
     # no catch block so that an exception above will cause the program to
     # fail after the following cleanup
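
The surrounding pattern in main(), condensed into a sketch (the helper name and the disconnect call are illustrative, not the module's exact code): time the cache update and report outcomes through the structured logger instead of print().

import time

def cache_metadata(database, metadata, logger):
  """Sketch only: names are illustrative."""
  start = time.time()
  try:
    database.update_covidcast_meta_cache(metadata)
    logger.info('successfully cached epidata',
                update_interval_in_seconds=round(time.time() - start, 2))
  finally:
    # no except clause, so a failure above still propagates after cleanup
    database.disconnect(True)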
27 changes: 13 additions & 14 deletions src/acquisition/covidcast/csv_importer.py
@@ -14,6 +14,7 @@
 # first party
 from delphi_utils import Nans
 from delphi.utils.epiweek import delta_epiweeks
+from delphi.epidata.acquisition.covidcast.logger import get_structured_logger
 
 class CsvImporter:
   """Finds and parses covidcast CSV files."""
@@ -84,16 +85,17 @@ def is_sane_week(value):
 
   @staticmethod
   def find_issue_specific_csv_files(scan_dir, glob=glob):
+    logger = get_structured_logger('find_issue_specific_csv_files')
     for path in sorted(glob.glob(os.path.join(scan_dir, '*'))):
       issuedir_match = CsvImporter.PATTERN_ISSUE_DIR.match(path.lower())
       if issuedir_match and os.path.isdir(path):
         issue_date_value = int(issuedir_match.group(2))
         issue_date = CsvImporter.is_sane_day(issue_date_value)
         if issue_date:
-          print(' processing csv files from issue date: "' + str(issue_date) + '", directory', path)
+          logger.info('processing csv files from issue date: "' + str(issue_date) + '", directory', path)
           yield from CsvImporter.find_csv_files(path, issue=(issue_date, epi.Week.fromdate(issue_date)), glob=glob)
         else:
-          print(' invalid issue directory day', issue_date_value)
+          logger.warning(event='invalid issue directory day', detail=issue_date_value, file=path)
 
   @staticmethod
   def find_csv_files(scan_dir, issue=(date.today(), epi.Week.fromdate(date.today())), glob=glob):
@@ -105,7 +107,7 @@ def find_csv_files(scan_dir, issue=(date.today(), epi.Week.fromdate(date.today()
     valid, details is a tuple of (source, signal, time_type, geo_type,
     time_value, issue, lag) (otherwise None).
     """
-
+    logger = get_structured_logger('find_csv_files')
     issue_day,issue_epiweek=issue
     issue_day_value=int(issue_day.strftime("%Y%m%d"))
     issue_epiweek_value=int(str(issue_epiweek))
@@ -117,14 +119,11 @@
       if not path.lower().endswith('.csv'):
         # safe to ignore this file
         continue
-
-      print('file:', path)
-
       # match a daily or weekly naming pattern
       daily_match = CsvImporter.PATTERN_DAILY.match(path.lower())
       weekly_match = CsvImporter.PATTERN_WEEKLY.match(path.lower())
       if not daily_match and not weekly_match:
-        print(' invalid csv path/filename', path)
+        logger.warning(event='invalid csv path/filename', detail=path, file=path)
        yield (path, None)
        continue
 
@@ -135,7 +134,7 @@
         match = daily_match
         time_value_day = CsvImporter.is_sane_day(time_value)
         if not time_value_day:
-          print(' invalid filename day', time_value)
+          logger.warning(event='invalid filename day', detail=time_value, file=path)
           yield (path, None)
           continue
         issue_value=issue_day_value
@@ -146,7 +145,7 @@
         match = weekly_match
         time_value_week=CsvImporter.is_sane_week(time_value)
         if not time_value_week:
-          print(' invalid filename week', time_value)
+          logger.warning(event='invalid filename week', detail=time_value, file=path)
           yield (path, None)
           continue
         issue_value=issue_epiweek_value
@@ -155,15 +154,15 @@
       # extract and validate geographic resolution
       geo_type = match.group(3).lower()
       if geo_type not in CsvImporter.GEOGRAPHIC_RESOLUTIONS:
-        print(' invalid geo_type', geo_type)
+        logger.warning(event='invalid geo_type', detail=geo_type, file=path)
        yield (path, None)
        continue
 
       # extract additional values, lowercased for consistency
       source = match.group(1).lower()
       signal = match.group(4).lower()
       if len(signal) > 64:
-        print(' invalid signal name (64 char limit)',signal)
+        logger.warning(event='invalid signal name (64 char limit)', detail=signal, file=path)
        yield (path, None)
        continue
 
@@ -344,19 +343,19 @@ def load_csv(filepath, geo_type, pandas=pandas):
     In case of a validation error, `None` is yielded for the offending row,
     including the header.
     """
-
+    logger = get_structured_logger('load_csv')
     # don't use type inference, just get strings
     table = pandas.read_csv(filepath, dtype='str')
 
     if not CsvImporter.is_header_valid(table.columns):
-      print(' invalid header')
+      logger.warning(event='invalid header', detail=table.columns, file=filepath)
       yield None
       return
 
     for row in table.itertuples(index=False):
       row_values, error = CsvImporter.extract_and_check_row(row, geo_type)
       if error:
-        print(' invalid value for %s (%s)' % (str(row), error))
+        logger.warning(event='invalid value for row', detail=(str(row), error), file=filepath)
        yield None
        continue
      yield row_values
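
These changes route validation messages through get_structured_logger from delphi.epidata.acquisition.covidcast.logger, logging each problem as an event with keyword fields (event, detail, file) rather than a pre-formatted print string. A minimal sketch of such a helper, assuming a structlog backend (the real helper may configure processors differently):

import logging
import structlog

def get_structured_logger(name):
  """Sketch only; Delphi's actual helper lives in delphi.epidata.acquisition.covidcast.logger."""
  structlog.configure(
    processors=[
      structlog.stdlib.add_log_level,               # attach a "level" field
      structlog.processors.TimeStamper(fmt="iso"),  # attach an ISO timestamp
      structlog.processors.JSONRenderer(),          # one JSON object per event
    ],
  )
  return structlog.get_logger(name=name)

logger = get_structured_logger('csv_importer_demo')
# Keyword arguments become machine-parseable fields:
logger.warning(event='invalid geo_type', detail='planet', file='bad.csv')

The payoff of the keyword style is that downstream log tooling can filter on fields such as event or file without regex-parsing message strings.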