|
 get_cds_index_intraday - Get CDS Index intraday data as a DataFrame using Ganymede gRPC API.
 get_cds_daily - Get CDS daily data as a DataFrame using Ganymede gRPC API.
 get_cds_intraday - Get CDS intraday data as a DataFrame using Ganymede gRPC API.
+get_index_tick - Get Index tick data as a DataFrame using Ganymede gRPC API.
 get_future_daily - Get future daily data as a DataFrame using Ganymede gRPC API.
 get_equity_daily - Get equity daily data as a DataFrame using Ganymede gRPC API.
 """
|
 from datetime import date, datetime
 from google.type import date_pb2
 from google.type import datetime_pb2

|
 from systemathics.apis.type.shared.v1 import asset_pb2 as asset
 from systemathics.apis.type.shared.v1 import constraints_pb2 as constraints
|
 import systemathics.apis.services.daily.v2.get_daily_pb2_grpc as get_daily_service
 import systemathics.apis.services.intraday.v2.get_intraday_pb2 as get_intraday
 import systemathics.apis.services.intraday.v2.get_intraday_pb2_grpc as get_intraday_service
+import systemathics.apis.services.tick.v2.get_tick_pb2 as get_tick
+import systemathics.apis.services.tick.v2.get_tick_pb2_grpc as get_tick_service

|
 import systemathics.apis.helpers.token_helpers as token_helpers
 import systemathics.apis.helpers.channel_helpers as channel_helpers
@@ -362,6 +368,192 @@ def get_cds_daily(ticker, start_date=None, end_date=None, batch=None, selected_f |
         print(f"Error: {str(e)}")
         return pd.DataFrame()

|
+def get_index_tick(ticker, start_date=None, end_date=None, start_time=None, end_time=None, selected_fields=None, provider="GoldmanSachs"):
+    """
+    Fetch Index tick data from the gRPC API for a given ticker and date range, with optional client-side time filtering.
+
+    Parameters:
+        ticker (str): The ticker symbol
+        start_date (datetime.date or str, optional): Start date for data retrieval.
+            If None, no lower bound is applied
+        end_date (datetime.date or str, optional): End date for data retrieval.
+            If None, no upper bound is applied
+        start_time (str or datetime.time, optional): Start time in 'HH:MM' (e.g., '09:30') or 'HH:MM:SS' (e.g., '09:30:05') format, used for client-side filtering.
+            If None, no time restriction
+        end_time (str or datetime.time, optional): End time in 'HH:MM' (e.g., '16:00') or 'HH:MM:SS' (e.g., '16:25:45') format, used for client-side filtering.
+            If None, no time restriction
+        selected_fields (list, optional): List of specific fields to retrieve.
+            If None, all fields are retrieved.
+        provider (str): Data provider, default is "GoldmanSachs"
+
+    Returns:
+        pd.DataFrame: DataFrame with Datetime as index and the requested fields as columns
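+
+    Example (hypothetical ticker, for illustration only):
+        df = get_index_tick("MY_INDEX", start_date="2024-01-02", end_date="2024-01-31",
+                            start_time="09:30", end_time="16:00",
+                            selected_fields=["MidCleanPrice", "MidYield"])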
+    """
+
+    # All available fields for Index tick data
+    all_fields = [
+        "AskBenchmarkSpread",
+        "AskCleanPrice",
+        "AskDirtyPrice",
+        "AskGSpread",
+        "AskModifiedDuration",
+        "AskYield",
+        "AskZSpread",
+        "BidBenchmarkSpread",
+        "BidCleanPrice",
+        "BidDirtyPrice",
+        "BidGSpread",
+        "BidModifiedDuration",
+        "BidYield",
+        "BidZSpread",
+        "MidBenchmarkSpread",
+        "MidCleanPrice",
+        "MidDirtyPrice",
+        "MidGSpread",
+        "MidModifiedDuration",
+        "MidYield",
+        "MidZSpread",
+        "OfficialBenchmarkSpread",
+        "OfficialCleanPrice",
+        "OfficialDirtyPrice",
+        "OfficialGSpread",
+        "OfficialModifiedDuration",
+        "OfficialYield",
+        "OfficialZSpread"
+    ]
+
+    # Use all fields if none specified; otherwise keep only the valid selected fields
+    if selected_fields is None:
+        fields = all_fields
+    else:
+        fields = [f for f in selected_fields if f in all_fields]
+        if not fields:
+            raise ValueError("No valid fields selected")
+
+    # Create identifier for Index
+    id = identifier.Identifier(
+        asset_type=asset.AssetType.ASSET_TYPE_INDEX,
+        ticker=ticker
+    )
+    id.provider.value = provider
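+    # provider is a wrapper message, so its inner value is assigned after construction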
+
+    # Build constraints only if we have at least one date (no time intervals due to a server limitation)
+    constraints_obj = None
+    if start_date is not None or end_date is not None:
+        # Create a DateInterval with only the dates that are provided
+        date_interval_kwargs = {}
+        if start_date is not None:
+            date_interval_kwargs['start_date'] = _parse_date_input(start_date)
+        if end_date is not None:
+            date_interval_kwargs['end_date'] = _parse_date_input(end_date)
+
+        constraints_obj = constraints.Constraints(
+            date_intervals=[date_interval.DateInterval(**date_interval_kwargs)]
+        )
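+    # Note: a DateInterval with only one bound is open-ended on the other side;
+    # e.g. providing start_date alone requests all ticks from that date onward.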
+
+    # Create the request with or without constraints
+    request_kwargs = {
+        'identifier': id,
+        'fields': fields
+    }
+
+    if constraints_obj is not None:
+        request_kwargs['constraints'] = constraints_obj
+
+    try:
+        # Open a gRPC channel
+        with channel_helpers.get_grpc_channel() as channel:
+            # Send the request and receive the streamed response
+            token = token_helpers.get_token()
+            first = True
+            response = []
+            info = None
+            # Create the service stub for the Tick service
+            service = get_tick_service.TickServiceStub(channel)
+            scalar_request = get_tick.TickRequest(**request_kwargs)
+
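+            # The first streamed message carries the metadata (available field names);
+            # every subsequent message carries one row of scalar tick values.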
+            for data in service.TickScalarStream(request=scalar_request, metadata=[('authorization', token)]):
+                if first:
+                    info = data
+                    first = False
+                else:
+                    response.append(data.data)
+
+            # Process the response
+            if not response or info is None:
+                print("No data received")
+                return pd.DataFrame()
+
+            # Get field indices
+            available_fields = list(info.info.fields)
+            field_indices = {field: available_fields.index(field)
+                             for field in fields if field in available_fields}
+
+            # Extract timestamps with full precision; google.type.DateTime carries
+            # nanoseconds, which are truncated to microseconds (the finest resolution
+            # Python's datetime supports)
+            dates = []
+            for d in response:
+                dt = datetime(d.datetime.year, d.datetime.month, d.datetime.day,
+                              d.datetime.hours, d.datetime.minutes, d.datetime.seconds,
+                              microsecond=d.datetime.nanos // 1000)
+                dates.append(dt)
+
+            # Create dictionary for DataFrame
+            data_dict = {}
+
+            # Extract data for each field
+            for field_name, field_index in field_indices.items():
+                data_dict[field_name] = [b.data[field_index] for b in response]
+
+            # Create DataFrame
+            df = pd.DataFrame(data_dict, index=dates)
+            df.index.name = 'Datetime'
+
+            # Sort by date for better readability
+            df = df.sort_index()
+
+            # Apply client-side time filtering if needed
+            if not df.empty and (start_time is not None or end_time is not None):
+
+                # Convert string times to time objects if needed
+                if isinstance(start_time, str):
+                    time_parts = start_time.split(':')
+                    hour = int(time_parts[0])
+                    minute = int(time_parts[1]) if len(time_parts) > 1 else 0
+                    second = int(time_parts[2]) if len(time_parts) > 2 else 0
+                    start_time_obj = datetime.min.time().replace(hour=hour, minute=minute, second=second)
+                else:
+                    start_time_obj = start_time
+
+                if isinstance(end_time, str):
+                    time_parts = end_time.split(':')
+                    hour = int(time_parts[0])
+                    minute = int(time_parts[1]) if len(time_parts) > 1 else 0
+                    second = int(time_parts[2]) if len(time_parts) > 2 else 0
+                    end_time_obj = datetime.min.time().replace(hour=hour, minute=minute, second=second)
+                else:
+                    end_time_obj = end_time
+
+                # Apply time filtering
+                if start_time_obj is not None and end_time_obj is not None:
+                    df = df.between_time(start_time_obj, end_time_obj)
+                elif start_time_obj is not None:
+                    df = df[df.index.time >= start_time_obj]
+                elif end_time_obj is not None:
+                    df = df[df.index.time <= end_time_obj]
+
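+            # between_time is inclusive of both endpoints by default, consistent with
+            # the >= / <= comparisons used for the one-sided filters above.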
+            return df
+
+    except grpc.RpcError as e:
+        print(f"gRPC Error: {e.code().name}")
+        print(f"Details: {e.details()}")
+        return pd.DataFrame()
+    except Exception as e:
+        print(f"Error: {str(e)}")
+        return pd.DataFrame()

|
 def get_cds_index_intraday(ticker, start_date=None, end_date=None, sampling=sampling.SAMPLING_ONE_MINUTE, selected_fields=None, provider="Markit"):
     """
|