Skip to content
This repository was archived by the owner on Sep 11, 2023. It is now read-only.

Commit 5f4e2c3

Browse files
Merge pull request #420 from openclimatefix/bug/261-download-eso-metadata
Bug/261 download eso metadata
2 parents b47bcbf + 5d8d2a9 commit 5f4e2c3

File tree

7 files changed

+379
-16
lines changed

7 files changed

+379
-16
lines changed

nowcasting_dataset/data_sources/gsp/eso.py

Lines changed: 37 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -40,33 +40,54 @@
4040
rename_load_columns = {v: k for k, v in rename_save_columns.items()}
4141

4242

43-
def get_gsp_metadata_from_eso(calculate_centroid: bool = True) -> pd.DataFrame:
43+
def get_gsp_metadata_from_eso(
44+
calculate_centroid: bool = True, load_local_file: bool = True, save_local_file: bool = False
45+
) -> pd.DataFrame:
4446
"""
4547
Get the metadata for the gsp, from ESO.
4648
4749
Args:
4850
calculate_centroid: Load the shape file also, and calculate the Centroid
51+
load_local_file: Load from a local file, not from ESO
52+
save_local_file: Save to a local file; only needed if the data is updated.
4953
5054
Returns: Dataframe of ESO Metadata
5155
5256
"""
5357
logger.debug("Getting GSP shape file")
5458

55-
# call ESO website. There is a possibility that this API will be replaced and its unclear if
56-
# this original API will will stay operational
57-
url = (
58-
"https://data.nationalgrideso.com/api/3/action/datastore_search?"
59-
"resource_id=bbe2cc72-a6c6-46e6-8f4e-48b879467368&limit=400"
60-
)
61-
with urllib.request.urlopen(url) as fileobj:
62-
d = json.loads(fileobj.read())
63-
64-
# make dataframe
65-
results = d["result"]["records"]
66-
metadata = pd.DataFrame(results)
67-
68-
# drop duplicates
69-
metadata = metadata.drop_duplicates(subset=["gsp_id"])
59+
local_file = f"{os.path.dirname(os.path.realpath(__file__))}/eso_metadata.csv"
60+
61+
if not os.path.isfile(local_file):
62+
logger.debug("There is no local file so going to get it from ESO, and save it afterwards")
63+
load_local_file = False
64+
save_local_file = True
65+
66+
if load_local_file:
67+
logger.debug("loading local file for ESO metadata")
68+
metadata = pd.read_csv(local_file)
69+
# rename the columns to full name
70+
logger.debug("loading local file for ESO metadata:done")
71+
else:
72+
# call ESO website. There is a possibility that this API will be replaced and it's unclear if
73+
# this original API will stay operational
74+
url = (
75+
"https://data.nationalgrideso.com/api/3/action/datastore_search?"
76+
"resource_id=bbe2cc72-a6c6-46e6-8f4e-48b879467368&limit=400"
77+
)
78+
with urllib.request.urlopen(url) as fileobj:
79+
d = json.loads(fileobj.read())
80+
81+
# make dataframe
82+
results = d["result"]["records"]
83+
metadata = pd.DataFrame(results)
84+
85+
# drop duplicates
86+
metadata = metadata.drop_duplicates(subset=["gsp_id"])
87+
88+
if save_local_file:
89+
# save file
90+
metadata.to_csv(local_file)
7091

7192
if calculate_centroid:
7293
# get shape data from eso

nowcasting_dataset/data_sources/gsp/eso_metadata.csv

Lines changed: 340 additions & 0 deletions
Large diffs are not rendered by default.
Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
ISO-8859-1
62.9 KB
Binary file not shown.
Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
PROJCS["British_National_Grid",GEOGCS["GCS_OSGB_1936",DATUM["D_OSGB_1936",SPHEROID["Airy_1830",6377563.396,299.3249646]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",400000.0],PARAMETER["False_Northing",-100000.0],PARAMETER["Central_Meridian",-2.0],PARAMETER["Scale_Factor",0.9996012717],PARAMETER["Latitude_Of_Origin",49.0],UNIT["Meter",1.0]]
1.01 MB
Binary file not shown.
2.67 KB
Binary file not shown.

0 commit comments

Comments
 (0)