|
40 | 40 | rename_load_columns = {v: k for k, v in rename_save_columns.items()} |
41 | 41 |
|
42 | 42 |
|
43 | | -def get_gsp_metadata_from_eso(calculate_centroid: bool = True) -> pd.DataFrame: |
| 43 | +def get_gsp_metadata_from_eso( |
| 44 | + calculate_centroid: bool = True, load_local_file: bool = True, save_local_file: bool = False |
| 45 | +) -> pd.DataFrame: |
44 | 46 | """ |
45 | 47 | Get the metadata for the gsp, from ESO. |
46 | 48 |
|
47 | 49 | Args: |
48 | 50 | calculate_centroid: Load the shape file also, and calculate the Centroid |
| 51 | + load_local_file: Load from a local file, not from ESO |
| 52 | +        save_local_file: Save to a local file; only needed if the data is updated. |
49 | 53 |
|
50 | 54 | Returns: Dataframe of ESO Metadata |
51 | 55 |
|
52 | 56 | """ |
53 | 57 | logger.debug("Getting GSP shape file") |
54 | 58 |
|
55 | | - # call ESO website. There is a possibility that this API will be replaced and its unclear if |
56 | | - # this original API will will stay operational |
57 | | - url = ( |
58 | | - "https://data.nationalgrideso.com/api/3/action/datastore_search?" |
59 | | - "resource_id=bbe2cc72-a6c6-46e6-8f4e-48b879467368&limit=400" |
60 | | - ) |
61 | | - with urllib.request.urlopen(url) as fileobj: |
62 | | - d = json.loads(fileobj.read()) |
63 | | - |
64 | | - # make dataframe |
65 | | - results = d["result"]["records"] |
66 | | - metadata = pd.DataFrame(results) |
67 | | - |
68 | | - # drop duplicates |
69 | | - metadata = metadata.drop_duplicates(subset=["gsp_id"]) |
| 59 | + local_file = f"{os.path.dirname(os.path.realpath(__file__))}/eso_metadata.csv" |
| 60 | + |
| 61 | + if not os.path.isfile(local_file): |
| 62 | + logger.debug("There is no local file so going to get it from ESO, and save it afterwards") |
| 63 | + load_local_file = False |
| 64 | + save_local_file = True |
| 65 | + |
| 66 | + if load_local_file: |
| 67 | + logger.debug("loading local file for ESO metadata") |
| 68 | + metadata = pd.read_csv(local_file) |
| 69 | + # rename the columns to full name |
| 70 | + logger.debug("loading local file for ESO metadata:done") |
| 71 | + else: |
| 72 | +        # call ESO website. There is a possibility that this API will be replaced and it's unclear if |
| 73 | +        # this original API will stay operational |
| 74 | + url = ( |
| 75 | + "https://data.nationalgrideso.com/api/3/action/datastore_search?" |
| 76 | + "resource_id=bbe2cc72-a6c6-46e6-8f4e-48b879467368&limit=400" |
| 77 | + ) |
| 78 | + with urllib.request.urlopen(url) as fileobj: |
| 79 | + d = json.loads(fileobj.read()) |
| 80 | + |
| 81 | + # make dataframe |
| 82 | + results = d["result"]["records"] |
| 83 | + metadata = pd.DataFrame(results) |
| 84 | + |
| 85 | + # drop duplicates |
| 86 | + metadata = metadata.drop_duplicates(subset=["gsp_id"]) |
| 87 | + |
| 88 | + if save_local_file: |
| 89 | + # save file |
| 90 | + metadata.to_csv(local_file) |
70 | 91 |
|
71 | 92 | if calculate_centroid: |
72 | 93 | # get shape data from eso |
|
0 commit comments