[NFT Metadata Crawler] Rename crate to remove parser (#14815)
* remove

* refactor

* refactor configs

* refactor

* lint
just-in-chang authored Oct 3, 2024
1 parent 9271edf commit d17389a
Showing 38 changed files with 206 additions and 206 deletions.
2 changes: 1 addition & 1 deletion .dockerignore
@@ -31,7 +31,7 @@
!api/doc/
!crates/indexer/migrations/**/*.sql
!ecosystem/indexer-grpc/indexer-grpc-parser/migrations/**/*.sql
!ecosystem/nft-metadata-crawler-parser/migrations/**/*.sql
!ecosystem/nft-metadata-crawler/migrations/**/*.sql
!rust-toolchain.toml
!scripts/
!terraform/helm/aptos-node/
2 changes: 1 addition & 1 deletion Cargo.lock

Some generated files are not rendered by default.

4 changes: 2 additions & 2 deletions Cargo.toml
@@ -125,7 +125,7 @@ members = [
"ecosystem/indexer-grpc/indexer-test-transactions",
"ecosystem/indexer-grpc/indexer-transaction-generator",
"ecosystem/indexer-grpc/transaction-filter",
"ecosystem/nft-metadata-crawler-parser",
"ecosystem/nft-metadata-crawler",
"ecosystem/node-checker",
"ecosystem/node-checker/fn-check-client",
"execution/block-partitioner",
@@ -392,7 +392,7 @@ aptos-network-benchmark = { path = "network/benchmark" }
aptos-network-builder = { path = "network/builder" }
aptos-network-checker = { path = "crates/aptos-network-checker" }
aptos-network-discovery = { path = "network/discovery" }
aptos-nft-metadata-crawler-parser = { path = "ecosystem/nft-metadata-crawler-parser" }
aptos-nft-metadata-crawler = { path = "ecosystem/nft-metadata-crawler" }
aptos-node = { path = "aptos-node" }
aptos-node-checker = { path = "ecosystem/node-checker" }
aptos-node-identity = { path = "crates/aptos-node-identity" }
4 changes: 2 additions & 2 deletions docker/builder/build-indexer.sh
@@ -15,7 +15,7 @@ cargo build --locked --profile=$PROFILE \
-p aptos-indexer-grpc-cache-worker \
-p aptos-indexer-grpc-file-store \
-p aptos-indexer-grpc-data-service \
-p aptos-nft-metadata-crawler-parser \
-p aptos-nft-metadata-crawler \
-p aptos-indexer-grpc-file-store-backfiller \
"$@"

@@ -24,7 +24,7 @@ BINS=(
aptos-indexer-grpc-cache-worker
aptos-indexer-grpc-file-store
aptos-indexer-grpc-data-service
aptos-nft-metadata-crawler-parser
aptos-nft-metadata-crawler
aptos-indexer-grpc-file-store-backfiller
)

2 changes: 1 addition & 1 deletion docker/builder/nft-metadata-crawler.Dockerfile
@@ -16,7 +16,7 @@ RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \
libpq-dev \
curl

COPY --link --from=indexer-builder /aptos/dist/aptos-nft-metadata-crawler-parser /usr/local/bin/aptos-nft-metadata-crawler-parser
COPY --link --from=indexer-builder /aptos/dist/aptos-nft-metadata-crawler /usr/local/bin/aptos-nft-metadata-crawler

# The health check port
EXPOSE 8080
160 changes: 0 additions & 160 deletions ecosystem/nft-metadata-crawler-parser/src/config.rs

This file was deleted.

File renamed without changes.
ecosystem/nft-metadata-crawler-parser/Cargo.toml → ecosystem/nft-metadata-crawler/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "aptos-nft-metadata-crawler-parser"
description = "NFT Metadata Crawler Parser service."
name = "aptos-nft-metadata-crawler"
description = "NFT Metadata Crawler related services."
version = "0.1.0"

# Workspace inherited keys
File renamed without changes.
20 changes: 20 additions & 0 deletions ecosystem/nft-metadata-crawler/src/asset_uploader/config.rs
@@ -0,0 +1,20 @@
// Copyright © Aptos Foundation
// SPDX-License-Identifier: Apache-2.0

use serde::{Deserialize, Serialize};

/// Required account data and auth keys for Cloudflare
#[derive(Clone, Debug, Deserialize, Serialize)]
#[serde(deny_unknown_fields)]
pub struct AssetUploaderConfig {
/// Cloudflare API key
pub cloudflare_auth_key: String,
/// Cloudflare Account ID provided at the images home page used to authenticate requests
pub cloudflare_account_id: String,
/// Cloudflare Account Hash provided at the images home page used for generating the CDN image URLs
pub cloudflare_account_hash: String,
/// Cloudflare Image Delivery URL prefix provided at the images home page used for generating the CDN image URLs
pub cloudflare_image_delivery_prefix: String,
/// In addition to on the fly transformations, Cloudflare images can be returned in preset variants. This is the default variant used with the saved CDN image URLs.
pub cloudflare_default_variant: String,
}
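
The doc comments above only describe how these fields are used. As a rough sketch of the intent (an assumption, not code from this commit), the saved CDN image URL is presumably assembled from the delivery prefix, account hash, and default variant along these lines:

// Hypothetical helper showing how the fields above could be combined.
// Cloudflare Images delivery URLs typically take the form
// <delivery_prefix>/<account_hash>/<image_id>/<variant>; the crate itself
// may assemble this differently.
fn cdn_image_url(config: &AssetUploaderConfig, image_id: &str) -> String {
    format!(
        "{}/{}/{}/{}",
        config.cloudflare_image_delivery_prefix, // e.g. "https://imagedelivery.net"
        config.cloudflare_account_hash,
        image_id, // identifier returned by Cloudflare after upload (hypothetical name)
        config.cloudflare_default_variant // e.g. "public"
    )
}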
@@ -2,8 +2,9 @@
// SPDX-License-Identifier: Apache-2.0

use crate::{
config::{AssetUploaderConfig, Server},
models::nft_metadata_crawler_uris::NFTMetadataCrawlerURIs,
asset_uploader::config::AssetUploaderConfig,
config::Server,
models::parsed_asset_uris::ParsedAssetUris,
utils::{
constants::{MAX_ASSET_UPLOAD_RETRY_SECONDS, MAX_RETRY_TIME_SECONDS},
database::upsert_uris,
@@ -23,6 +24,8 @@ use std::{sync::Arc, time::Duration};
use tracing::{info, warn};
use url::Url;

pub mod config;

#[derive(Clone)]
pub struct AssetUploaderContext {
pub asset_uploader_config: Arc<AssetUploaderConfig>,
@@ -141,7 +144,7 @@ impl AssetUploaderContext {
cdn_uri = cdn_url,
"[Asset Uploader] Writing to Postgres"
);
let mut model = NFTMetadataCrawlerURIs::new(url.as_ref());
let mut model = ParsedAssetUris::new(url.as_ref());
model.set_cdn_image_uri(Some(cdn_url.clone()));

let mut conn = self_clone.pool.get().context("Failed to get connection")?;
93 changes: 93 additions & 0 deletions ecosystem/nft-metadata-crawler/src/config.rs
@@ -0,0 +1,93 @@
// Copyright © Aptos Foundation
// SPDX-License-Identifier: Apache-2.0

use crate::{
asset_uploader::{config::AssetUploaderConfig, AssetUploaderContext},
parser::{config::ParserConfig, ParserContext},
utils::database::{establish_connection_pool, run_migrations},
};
use aptos_indexer_grpc_server_framework::RunnableConfig;
use axum::Router;
use diesel::{
r2d2::{ConnectionManager, Pool},
PgConnection,
};
use enum_dispatch::enum_dispatch;
use serde::{Deserialize, Serialize};
use tokio::net::TcpListener;
use tracing::info;

/// Trait for building a router for axum
#[enum_dispatch]
pub trait Server: Send + Sync {
fn build_router(&self) -> Router;
}

#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum ServerConfig {
Parser(ParserConfig),
AssetUploader(AssetUploaderConfig),
}

/// Structs to hold config from YAML
#[derive(Clone, Debug, Deserialize, Serialize)]
#[serde(deny_unknown_fields)]
pub struct NFTMetadataCrawlerConfig {
pub database_url: String,
pub server_port: u16,
pub server_config: ServerConfig,
}

#[derive(Clone)]
#[enum_dispatch(Server)]
pub enum ServerContext {
Parser(ParserContext),
AssetUploader(AssetUploaderContext),
}

impl ServerConfig {
pub async fn build_context(
&self,
pool: Pool<ConnectionManager<PgConnection>>,
) -> ServerContext {
match self {
ServerConfig::Parser(parser_config) => {
ServerContext::Parser(ParserContext::new(parser_config.clone(), pool).await)
},
ServerConfig::AssetUploader(asset_uploader_config) => ServerContext::AssetUploader(
AssetUploaderContext::new(asset_uploader_config.clone(), pool),
),
}
}
}

#[async_trait::async_trait]
impl RunnableConfig for NFTMetadataCrawlerConfig {
/// Main driver function that establishes a connection to Pubsub and parses the Pubsub entries in parallel
async fn run(&self) -> anyhow::Result<()> {
info!("[NFT Metadata Crawler] Starting with config: {:?}", self);

info!("[NFT Metadata Crawler] Connecting to database");
let pool = establish_connection_pool(&self.database_url);
info!("[NFT Metadata Crawler] Database connection successful");

info!("[NFT Metadata Crawler] Running migrations");
run_migrations(&pool);
info!("[NFT Metadata Crawler] Finished migrations");

// Create request context
let context = self.server_config.build_context(pool).await;
let listener = TcpListener::bind(format!("0.0.0.0:{}", self.server_port)).await?;
axum::serve(listener, context.build_router()).await?;

Ok(())
}

fn get_server_name(&self) -> String {
match &self.server_config {
ServerConfig::Parser(_) => "parser".to_string(),
ServerConfig::AssetUploader(_) => "asset_uploader".to_string(),
}
}
}
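
Because ServerConfig is internally tagged with #[serde(tag = "type")], the chosen variant's fields sit alongside the type key in the YAML. A minimal sketch of what the crawler-specific YAML could look like, using the AssetUploader variant since ParserConfig's fields are not shown in this diff (all values are placeholders, and any outer wrapping expected by aptos-indexer-grpc-server-framework is omitted):

# Hypothetical config sketch; every value is a placeholder.
database_url: "postgresql://user:password@localhost:5432/nft_metadata_crawler"
server_port: 8080
server_config:
  type: AssetUploader
  cloudflare_auth_key: "<cloudflare-api-key>"
  cloudflare_account_id: "<cloudflare-account-id>"
  cloudflare_account_hash: "<cloudflare-account-hash>"
  cloudflare_image_delivery_prefix: "https://imagedelivery.net"
  cloudflare_default_variant: "public"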
@@ -12,7 +12,6 @@ pub mod models;
pub mod parser;
pub mod schema;
pub mod utils;
pub mod worker;

/// HEAD request to get MIME type and size of content
pub async fn get_uri_metadata(url: &str) -> anyhow::Result<(String, u32)> {
@@ -2,7 +2,7 @@
// SPDX-License-Identifier: Apache-2.0

use aptos_indexer_grpc_server_framework::ServerArgs;
use aptos_nft_metadata_crawler_parser::config::NFTMetadataCrawlerConfig;
use aptos_nft_metadata_crawler::config::NFTMetadataCrawlerConfig;

#[tokio::main]
async fn main() -> anyhow::Result<()> {

0 comments on commit d17389a
