|
5 | 5 | functions: |
6 | 6 | get_cds_index_daily - Get CDS Index daily data as a DataFrame using Ganymede gRPC API. |
7 | 7 | get_cds_index_intraday - Get CDS Index intraday data as a DataFrame using Ganymede gRPC API. |
| 8 | + get_cds_daily - Get CDS daily data as a DataFrame using Ganymede gRPC API. |
| 9 | + get_cds_intraday - Get CDS intraday data as a DataFrame using Ganymede gRPC API. |
8 | 10 | get_future_daily - Get future daily data as a DataFrame using Ganymede gRPC API. |
| 11 | + get_equity_daily - Get equity daily data as a DataFrame using Ganymede gRPC API. |
9 | 12 | """ |
10 | 13 |
|
11 | 14 |
|
@@ -192,7 +195,174 @@ def get_cds_index_daily(ticker, start_date=None, end_date=None, batch=None, sele |
192 | 195 | except Exception as e: |
193 | 196 | print(f"Error: {str(e)}") |
194 | 197 | return pd.DataFrame() |
| 198 | + |
def get_cds_daily(ticker, start_date=None, end_date=None, batch=None, selected_fields=None, provider="Markit"):
    """
    Fetch CDS daily data from gRPC API for a given ticker and date range.

    Parameters:
        ticker (str): The ticker symbol.
        start_date (datetime.date or str, optional): Start date for data retrieval.
            If None, no lower bound is applied.
        end_date (datetime.date or str, optional): End date for data retrieval.
            If None, no upper bound is applied.
        batch (str, optional): Batch name used for filtering. If None, gets all batches.
        selected_fields (list, optional): List of specific fields to retrieve.
            If None, gets all fields.
        provider (str): Data provider, default is "Markit".

    Returns:
        pd.DataFrame: DataFrame with Date as index, a 'Key' column and the
        requested fields as columns. Empty DataFrame on error or when no
        data is received.

    Raises:
        ValueError: If selected_fields contains no valid field name.
    """
    # All available fields for an individual CDS (not an index).
    all_fields = [
        "ConventionalSpread",
        "ParSpread",
        "Upfront",
        "RealRecovery",
        "AssumedRecovery",
        "DefaultProbability",
        "JumpToDefault",
        "JumpToZero"
    ]

    # Use all fields if none specified, otherwise keep only known ones.
    if selected_fields is None:
        fields = all_fields
    else:
        fields = [f for f in selected_fields if f in all_fields]
        if not fields:
            raise ValueError("No valid fields selected")

    # Create identifier for an individual CDS (not an index).
    # NOTE: renamed from `id` to avoid shadowing the builtin.
    cds_id = identifier.Identifier(
        asset_type=asset.AssetType.ASSET_TYPE_CDS,
        ticker=ticker
    )
    cds_id.provider.value = provider

    # Build constraints only if we have at least one date bound.
    constraints_obj = None
    if start_date is not None or end_date is not None:
        # Create DateInterval with only the dates that are provided.
        date_interval_kwargs = {}
        if start_date is not None:
            date_interval_kwargs['start_date'] = _parse_date_input(start_date)
        if end_date is not None:
            date_interval_kwargs['end_date'] = _parse_date_input(end_date)
        constraints_obj = constraints.Constraints(
            date_intervals=[date_interval.DateInterval(**date_interval_kwargs)]
        )

    try:
        # Open gRPC channel and stream the response.
        with channel_helpers.get_grpc_channel() as channel:
            token = token_helpers.get_token()
            first = True
            response = []
            info = None
            service = get_daily_service.DailyServiceStub(channel)

            # Common request arguments; the batch key selects which RPC is used.
            request_kwargs = {
                'identifier': cds_id,
                'fields': fields
            }
            if constraints_obj is not None:
                request_kwargs['constraints'] = constraints_obj

            if batch is None:
                stream = service.DailyVectorStream(
                    request=get_daily.DailyRequest(**request_kwargs),
                    metadata=[('authorization', token)]
                )
            else:
                request_kwargs['key'] = batch
                stream = service.DailyVectorKeyStream(
                    request=get_daily.DailyVectorKeyRequest(**request_kwargs),
                    metadata=[('authorization', token)]
                )

            for data in stream:
                # First streamed message carries metadata (field list);
                # the rest carry the actual data points.
                if first:
                    info = data
                    first = False
                else:
                    response.append(data.data)

        # Process the response.
        if not response or info is None:
            print("No data received")
            return pd.DataFrame()

        # Map each requested field to its index in the returned data vectors.
        available_fields = list(info.info.fields)
        field_indices = {field: available_fields.index(field)
                         for field in fields if field in available_fields}

        # Extract dates and batch keys.
        dates = [date(d.date.year, d.date.month, d.date.day) for d in response]
        keys = [b.key for b in response]

        # Build the DataFrame columns.
        data_dict = {'Key': keys}
        for field_name, field_index in field_indices.items():
            data_dict[field_name] = [b.data[field_index] for b in response]

        df = pd.DataFrame(data_dict, index=dates)
        df.index.name = 'Date'

        # Sort by date for better readability.
        return df.sort_index()

    except grpc.RpcError as e:
        print(f"gRPC Error: {e.code().name}")
        print(f"Details: {e.details()}")
        return pd.DataFrame()
    except Exception as e:
        print(f"Error: {str(e)}")
        return pd.DataFrame()
| 365 | + |
196 | 366 | def get_cds_index_intraday(ticker, start_date=None, end_date=None, sampling=sampling.SAMPLING_ONE_MINUTE, selected_fields=None, provider="Markit"): |
197 | 367 | """ |
198 | 368 | Fetch CDS Index intraday data from gRPC API for a given ticker and date range. |
@@ -313,6 +483,130 @@ def get_cds_index_intraday(ticker, start_date=None, end_date=None, sampling=samp |
313 | 483 | print(f"Error: {str(e)}") |
314 | 484 | return pd.DataFrame() |
315 | 485 |
|
def get_cds_intraday(ticker, start_date=None, end_date=None, sampling=sampling.SAMPLING_ONE_MINUTE, selected_fields=None, provider="Markit"):
    """
    Fetch CDS intraday data from gRPC API for a given ticker and date range.

    Parameters:
        ticker (str): The ticker symbol.
        start_date (datetime.date or str, optional): Start date for data retrieval.
            If None, no lower bound is applied.
        end_date (datetime.date or str, optional): End date for data retrieval.
            If None, no upper bound is applied.
        sampling (sampling, optional): Sampling period for intraday data.
            Defaults to one minute.
        selected_fields (list, optional): List of specific fields to retrieve.
            If None, gets all fields.
        provider (str): Data provider, default is "Markit".

    Returns:
        pd.DataFrame: DataFrame with Datetime as index and the requested
        fields as columns. Empty DataFrame on error or when no data is
        received.

    Raises:
        ValueError: If selected_fields contains no valid field name.
    """
    # All available intraday fields for an individual CDS (bid/mid/ask quotes).
    all_fields = [
        'BidConventionalSpread',
        'BidParSpread',
        'BidUpfront',
        'MidConventionalSpread',
        'MidParSpread',
        'MidUpfront',
        'AskConventionalSpread',
        'AskParSpread',
        'AskUpfront'
    ]

    # Use all fields if none specified, otherwise keep only known ones.
    if selected_fields is None:
        fields = all_fields
    else:
        fields = [f for f in selected_fields if f in all_fields]
        if not fields:
            raise ValueError("No valid fields selected")

    # Create identifier for an individual CDS (not an index).
    # NOTE: renamed from `id` to avoid shadowing the builtin.
    cds_id = identifier.Identifier(
        asset_type=asset.AssetType.ASSET_TYPE_CDS,
        ticker=ticker
    )
    cds_id.provider.value = provider

    # Build constraints only if at least one date bound is provided.
    constraints_obj = None
    if start_date is not None or end_date is not None:
        # Create DateInterval with only the dates that are provided.
        date_interval_kwargs = {}
        if start_date is not None:
            date_interval_kwargs['start_date'] = _parse_date_input(start_date)
        if end_date is not None:
            date_interval_kwargs['end_date'] = _parse_date_input(end_date)
        constraints_obj = constraints.Constraints(
            date_intervals=[date_interval.DateInterval(**date_interval_kwargs)]
        )

    # Create request with or without constraints.
    request_kwargs = {
        'identifier': cds_id,
        'fields': fields,
        'sampling': sampling
    }
    if constraints_obj is not None:
        request_kwargs['constraints'] = constraints_obj

    try:
        # Open gRPC channel and stream the response.
        with channel_helpers.get_grpc_channel() as channel:
            token = token_helpers.get_token()
            first = True
            response = []
            info = None
            service = get_intraday_service.IntradayServiceStub(channel)
            scalar_request = get_intraday.IntradayRequest(**request_kwargs)

            for data in service.IntradayScalarStream(request=scalar_request, metadata=[('authorization', token)]):
                # First streamed message carries metadata (field list);
                # the rest carry the actual data points.
                if first:
                    info = data
                    first = False
                else:
                    response.append(data.data)

        # Process the response.
        if not response or info is None:
            print("No data received")
            return pd.DataFrame()

        # Map each requested field to its index in the returned data vectors.
        available_fields = list(info.info.fields)
        field_indices = {field: available_fields.index(field)
                         for field in fields if field in available_fields}

        # Build timestamps from the protobuf datetime components.
        timestamps = [
            datetime(d.datetime.year, d.datetime.month, d.datetime.day,
                     d.datetime.hours, d.datetime.minutes, d.datetime.seconds)
            for d in response
        ]

        # Build the DataFrame columns.
        data_dict = {}
        for field_name, field_index in field_indices.items():
            data_dict[field_name] = [b.data[field_index] for b in response]

        df = pd.DataFrame(data_dict, index=timestamps)
        df.index.name = 'Datetime'

        # Sort by timestamp for better readability.
        return df.sort_index()

    except grpc.RpcError as e:
        print(f"gRPC Error: {e.code().name}")
        print(f"Details: {e.details()}")
        return pd.DataFrame()
    except Exception as e:
        print(f"Error: {str(e)}")
        return pd.DataFrame()
| 608 | + |
| 609 | + |
316 | 610 | def get_future_daily(ticker, start_date=None, end_date=None, provider="FirstRateData"): |
317 | 611 | """ |
318 | 612 | Fetch Future daily data from gRPC API for a given ticker and optionally filter by date range. |
|
0 commit comments