Skip to content
This repository has been archived by the owner on Jun 2, 2023. It is now read-only.

Commit

Permalink
update function name
Browse files Browse the repository at this point in the history
  • Loading branch information
amsnyder committed Jan 7, 2022
1 parent e9e5055 commit 8dbb810
Showing 1 changed file with 2 additions and 2 deletions.
4 changes: 2 additions & 2 deletions 01_fetch/src/fetch_usgs.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ def fetch_site_info(site_num, outfile):
print(f'fetching site info for site {site_num} and saving locally')
urllib.request.urlretrieve(site_url, outfile)

def process_site_info_to_df(raw_site_info_txt, site_info_outfile_csv, s3_bucket, write_location, s3_client):
def process_site_info_to_csv(raw_site_info_txt, site_info_outfile_csv, s3_bucket, write_location, s3_client):
'''
process raw site info text file into a csv file,
return minimum date of measured data (for any parameter) as start date for site
Expand Down Expand Up @@ -89,7 +89,7 @@ def main():
site_info_outfile_txt = os.path.join('.', '01_fetch', 'out', 'metadata', f'usgs_nwis_site_info_{site_num}.txt')
fetch_site_info(site_num, site_info_outfile_txt)
site_info_outfile_csv = os.path.join('.', '01_fetch', 'out', 'metadata', f'usgs_nwis_site_info_{site_num}.csv')
start_dt = process_site_info_to_df(site_info_outfile_txt, site_info_outfile_csv, s3_bucket, write_location, s3_client)
start_dt = process_site_info_to_csv(site_info_outfile_txt, site_info_outfile_csv, s3_bucket, write_location, s3_client)
end_dt = datetime.datetime.today().strftime("%Y-%m-%d")

# start and end dates for data fetch
Expand Down

0 comments on commit 8dbb810

Please sign in to comment.