Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 7 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

8 changes: 8 additions & 0 deletions price/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ license.workspace = true

[dependencies]
anyhow = { workspace = true }
async-trait = { workspace = true }
config = { workspace = true }
clap = { workspace = true }
serde = { workspace = true }
Expand All @@ -19,16 +20,23 @@ metrics = { workspace = true }
tokio = { workspace = true }
chrono = { workspace = true }
helium-proto = { workspace = true }
prost = { workspace = true }
rust_decimal = { workspace = true }
sqlx = { workspace = true }
triggered = { workspace = true }
humantime-serde = { workspace = true }

custom-tracing = { path = "../custom_tracing" }
db-store = { path = "../db_store" }
file-store = { path = "../file_store" }
file-store-oracles = { path = "../file_store_oracles" }
poc-metrics = { path = "../metrics" }
solana = { path = "../solana" }
task-manager = { path = "../task_manager" }
tls-init = { path = "../tls_init" }

helium-iceberg = { path = "../helium_iceberg" }
trino-rust-client = { version = "0.9" }

[dev-dependencies]
temp-env = "0.3.6"
7 changes: 7 additions & 0 deletions price/migrations/1_files_processed.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
-- Bookkeeping table for the file_info_poller: records which S3 files have
-- already been ingested so a restart or re-run does not process them twice.
-- (Per the settings template, this is only used by the `backfill` path.)
CREATE TABLE files_processed (
-- Logical consumer name; allows several pollers to share one table.
process_name TEXT NOT NULL DEFAULT 'default',
-- S3 object key; primary key, so a file can be recorded at most once.
file_name VARCHAR PRIMARY KEY,
file_type VARCHAR NOT NULL,
-- Timestamp encoded in the file itself (its logical time).
file_timestamp TIMESTAMPTZ NOT NULL,
-- Wall-clock time this row was written (when ingestion happened).
processed_at TIMESTAMPTZ NOT NULL
);
42 changes: 40 additions & 2 deletions price/pkg/settings-template.toml
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,6 @@
#
# source = "https://hermes.pyth.network/v2/updates/price/latest?ids[]=649fdd7ec08e8e2a20f425729854e90293dcbe2376abc47197a14da6ff339756"

# S3 bucket to write price reports to. Required.
output_bucket = "price"

# Folder for the local on-disk price cache / file sink staging area.
# Default below.
Expand All @@ -29,6 +27,13 @@ interval = "60 seconds"
#
# default_price =

# S3 bucket for `PriceReportV1` protobuf files.
# - Optional for `server`. Omit when running an Iceberg-only deployment;
# the server will then fan out only to the Iceberg sink.
# - Required for `backfill`, which reads these files back to seed Iceberg.
#
# [output]
# bucket = "price"

[custom_tracing]
# File watched for runtime tracing-config reloads. Default below.
Expand Down Expand Up @@ -56,3 +61,36 @@ interval = "60 seconds"
# Prometheus scrape endpoint. Default below.
#
# endpoint = "127.0.0.1:19000"


# Database settings. Required only when running the `backfill` subcommand;
# the `server` path does not open any DB connections. Used by the
# file_info_poller to track which S3 files have already been processed.
#
# [database]
# url = "postgres://postgres:postgres@localhost/price"
# max_connections = 10


# Iceberg catalog settings. When provided, the `server` also writes each
# live tick into the Iceberg table `tokens.prices`. Required by `backfill`.
#
# Live writes go through a `BatchedWriter` that spools each record to
# `<cache>/iceberg-spool` for crash durability and commits a single
# Iceberg snapshot when either `iceberg_batch_size` records or
# `iceberg_batch_timeout` of wall-clock time has elapsed since the last
# commit. Defaults below produce ~24 snapshots/day at a 60s tick.
#
# NOTE(review): these two keys appear to be top-level settings (note the
# `iceberg_` prefix) rather than members of `[iceberg_settings]`. If you
# uncomment them at this position in the file they will be parsed as keys
# of whatever `[table]` header precedes them; place them above the first
# table header instead — confirm against the Settings struct.
#
# iceberg_batch_size = 60
# iceberg_batch_timeout = "1 hour"
#
# [iceberg_settings]
# catalog_uri = "http://iceberg-catalog:8181"
# catalog_name = "nova"
# warehouse = "s3://warehouse-bucket/iceberg"
#
# [iceberg_settings.auth]
# # (optional) OAuth2 / token config
#
# [iceberg_settings.s3]
# region = "us-west-2"
Loading
Loading