From 662e7dc24bc33616199d15ee9efb041da4e68caa Mon Sep 17 00:00:00 2001
From: yaphet
Date: Wed, 11 Mar 2020 19:36:05 +0800
Subject: [PATCH] fix spark doc (#1904)

---
 .../data-import/spark-writer.md | 8 ++++----
 .../data-import/spark-writer.md | 8 ++++----
 2 files changed, 8 insertions(+), 8 deletions(-)

diff --git a/docs/manual-CN/3.build-develop-and-administration/3.deploy-and-administrations/server-administration/storage-service-administration/data-import/spark-writer.md b/docs/manual-CN/3.build-develop-and-administration/3.deploy-and-administrations/server-administration/storage-service-administration/data-import/spark-writer.md
index 38be0f95714..001feda5275 100644
--- a/docs/manual-CN/3.build-develop-and-administration/3.deploy-and-administrations/server-administration/storage-service-administration/data-import/spark-writer.md
+++ b/docs/manual-CN/3.build-develop-and-administration/3.deploy-and-administrations/server-administration/storage-service-administration/data-import/spark-writer.md
@@ -203,7 +203,7 @@ Player 表结构如下:
   }
 
   # 处理标签
-  tags: {
+  tags: [
 
     # 从 HDFS 文件加载数据, 此处数据类型为 Parquet
     # tag 名称为 tag name 0
@@ -235,10 +235,10 @@ Player 表结构如下:
       }
       vertex: vertex_id_field
     }
-  }
+  ]
 
   # 处理边
-  edges: {
+  edges: [
     # 从 HDFS 加载数据，数据类型为 JSON
     # 边名称为 edge_name_0
     # HDFS JSON 文件中的 field_0、field_1、field 2 将被写入 edge_name_0
@@ -271,7 +271,7 @@ Player 表结构如下:
       source: source_id_field
       target: target_id_field
     }
-  }
+  ]
 }
 ```
 
diff --git a/docs/manual-EN/3.build-develop-and-administration/3.deploy-and-administrations/server-administration/storage-service-administration/data-import/spark-writer.md b/docs/manual-EN/3.build-develop-and-administration/3.deploy-and-administrations/server-administration/storage-service-administration/data-import/spark-writer.md
index 82bed3c679a..2d128877fc1 100644
--- a/docs/manual-EN/3.build-develop-and-administration/3.deploy-and-administrations/server-administration/storage-service-administration/data-import/spark-writer.md
+++ b/docs/manual-EN/3.build-develop-and-administration/3.deploy-and-administrations/server-administration/storage-service-administration/data-import/spark-writer.md
@@ -203,7 +203,7 @@ Example of a mapping file for the input source:
   }
 
   # Processing tags
-  tags: {
+  tags: [
 
     # Loading tag from HDFS and the data type is parquet.
     # The tag's name is tag_name_0.
@@ -235,10 +235,10 @@ Example of a mapping file for the input source:
       }
       vertex: vertex_id_field
     }
-  }
+  ]
 
   # Processing edges
-  edges: {
+  edges: [
     # Loading edge from HDFS and data type is JSON.
     # The edge's name is edge_name_0.
     # field_0, field_1 and field_2 from HDFS's JSON file are written into edge_name_0
@@ -271,7 +271,7 @@ Example of a mapping file for the input source:
      source: source_id_field
      target: target_id_field
    }
-  }
+  ]
 }
 ```
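For reference, a minimal sketch of the mapping-file skeleton these hunks document: `tags` and `edges` are arrays holding one block per tag or edge type, not objects. Only the keys visible in the hunks above (`vertex`, `source`, `target`) and the docs' own placeholder names are shown; every other setting is elided.

```text
{
  # spark / nebula configuration blocks elided

  # tags is an array: one block per tag to import
  tags: [
    {
      # per-tag settings (input type, path, field mapping, ...) elided
      vertex: vertex_id_field
    }
  ]

  # edges is an array: one block per edge type to import
  edges: [
    {
      # per-edge settings (input type, path, field mapping, ...) elided
      source: source_id_field
      target: target_id_field
    }
  ]
}
```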