fix(index.js): 🐛 Fixed insert of optional data into BQ
Added a parameterized option to allow partial inserts when an error occurs, and enabled skipping of unknown columns
joaquimsn committed Mar 6, 2021
1 parent 3a21f9c commit 3eea1ba
Showing 3 changed files with 17 additions and 19 deletions.
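
The core of the change is passing two options to BigQuery's streaming insert: skipInvalidRows lets the valid rows load even when some rows fail, and ignoreUnknownValues drops fields that are not in the table schema instead of rejecting the whole row. A minimal sketch of their effect (row contents are hypothetical examples, not repository data):

// Minimal sketch of the two insert options enabled by this commit;
// the rows below are hypothetical, not taken from the repository.
const { BigQuery } = require('@google-cloud/bigquery');

async function demoPartialInsert() {
  const rows = [
    { data: '2021-03-06T10:00:00', schema: 'produto', status: 'OK' },
    // 'unknownColumn' is not in the table schema: ignoreUnknownValues drops
    // it instead of failing the row, and skipInvalidRows lets the remaining
    // rows load even if this one were invalid.
    { data: '2021-03-06T10:00:01', schema: 'produto', status: 'OK', unknownColumn: 1 },
  ];
  await new BigQuery()
    .dataset('raft_suite')            // BQ_DATASET_ID in the config below
    .table('penguin_datalayer_raw')   // BQ_TABLE_ID_RAWDATA
    .insert(rows, { skipInvalidRows: true, ignoreUnknownValues: true });
}
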
28 changes: 14 additions & 14 deletions index.js
@@ -18,20 +18,21 @@ exports.penguinDatalayerCollect = async (req, res) => {
  res.sendStatus(204);
} else {
  penguinConfig = await loadPenguinConfig();
- deparaSchema = penguinConfig.DEPARA_SCHEMA;
  let query = req.query;
  debugging = query.debugging; // If true, enables logging of the validation JSON
  delete query.debugging;

  // Check whether the schema identifier was passed as a query parameter
  if (!query[penguinConfig.PARAM_QUERY_STRING_SCHEMA]) {
    return;
  }

+ deparaSchema = penguinConfig.DEPARA_SCHEMA;
+
  // Get the list of dataLayer schemas to validate against, based on the
  // schema given in the request; otherwise fall back to the default
  let listaSchema = deparaSchema[query[penguinConfig.PARAM_QUERY_STRING_SCHEMA]];
  let jsonSchemas = await downloadSchemas(listaSchema || deparaSchema.global);
- trace(jsonSchemas);
+ trace('SCHEMAS', jsonSchemas);

  let result = [];
  jsonSchemas.forEach(schema => {
@@ -41,7 +42,7 @@ exports.penguinDatalayerCollect = async (req, res) => {
    })
  });

- trace(result);
+ trace('RESULT VALID', result);
  insertRowsAsStream(result);
  res.status(200).send(debugging ? result : 'sucesso!');
}
@@ -89,21 +90,20 @@ function transformarQueryStringInObject(data) {
 */
async function insertRowsAsStream(data) {
  const bigquery = new BigQuery();
- const datasetId = penguinConfig.BQ_DATASET_ID;
- const tableId = penguinConfig.BQ_TABLE_ID_RAWDATA;
- const rows = data;

  const options = {
-   schema: penguinConfig.BQ_SCHEMA_RAWDATA
+   schema: penguinConfig.BQ_SCHEMA_RAWDATA,
+   skipInvalidRows: true,
+   ignoreUnknownValues: true
  };

  trace(data);
  // Insert data into a table
  await bigquery
-   .dataset(datasetId)
-   .table(tableId)
-   .insert(rows, options, insertHandler);
+   .dataset(penguinConfig.BQ_DATASET_ID)
+   .table(penguinConfig.BQ_TABLE_ID_RAWDATA)
+   .insert(data, options, insertHandler);

- console.log(`Inserted ${rows.length} rows`);
+ console.log(`Inserted ${data.length} rows`);
}

/**
@@ -142,7 +142,7 @@ async function loadPenguinConfig() {

function insertHandler(err, apiResponse) {
  if (err) {
-   console.log(err.name, err);
+   console.error(err.name, JSON.stringify(err));
  }
}

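With skipInvalidRows enabled, the Node.js client reports row-level failures as a PartialFailureError whose errors array lists each rejected row, which is why stringifying the error is more informative than logging the object directly. A hedged sketch of how insertHandler could unpack it (an illustration, not part of the commit):

// Illustration only (not in the commit): unpacking a PartialFailureError
// from @google-cloud/bigquery inside insertHandler.
function insertHandler(err, apiResponse) {
  if (err) {
    console.error(err.name, JSON.stringify(err));
    // With skipInvalidRows, err.name is 'PartialFailureError' and err.errors
    // holds one entry per failed row: { row: {...}, errors: [{ message, reason }] }
    if (err.name === 'PartialFailureError' && Array.isArray(err.errors)) {
      err.errors.forEach(e =>
        console.error('Row rejected:', JSON.stringify(e.row), JSON.stringify(e.errors))
      );
    }
  }
}
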
@@ -1,8 +1,7 @@
{
- "DEPARA_SCHEMA": {"produto":["camada_global.json","detalhamento_produto.json"],"comprafinalizada":["camada_global.json","compra_efetuada.json"],"global":["exemplo_schema.json"]},
+ "DEPARA_SCHEMA": {"produto":["exemplo_schema.json","detalhamento_produto.json"],"comprafinalizada":["exemplo_schema.json","compra_efetuada.json"],"global":["exemplo_schema.json"]},
  "PARAM_QUERY_STRING_SCHEMA": "schema",
  "BQ_DATASET_ID": "raft_suite",
  "BQ_TABLE_ID_RAWDATA": "penguin_datalayer_raw",
- "BQ_SCHEMA_RAWDATA": "data: DATETIME, schema: STRING, status: STRING, message: STRING, schema: STRING, objectName: STRING, keyName: STRING",
- "PUB_SUB_TOPIC": ""
+ "BQ_SCHEMA_RAWDATA": "data: DATETIME, schema: STRING, status: STRING, message: STRING, schema: STRING, objectName: STRING, keyName: STRING"
}
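
For reference, the lookup that index.js performs against these keys boils down to the following sketch (the request value 'produto' is a hypothetical example):

// Sketch of the config lookup done in index.js above; runs inside the
// async request handler, hence the await.
const query = { schema: 'produto' };   // e.g. a request to ?schema=produto
const deparaSchema = penguinConfig.DEPARA_SCHEMA;
const listaSchema = deparaSchema[query[penguinConfig.PARAM_QUERY_STRING_SCHEMA]];
// 'produto' selects ["exemplo_schema.json", "detalhamento_produto.json"];
// an unknown or missing schema value falls back to deparaSchema.global.
const jsonSchemas = await downloadSchemas(listaSchema || deparaSchema.global);
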
3 changes: 1 addition & 2 deletions terraform/variables.tf
@@ -16,8 +16,7 @@ locals {
#######################################
variable "bucket_name" {
  type        = string
- description = "Google Cloud Storage Bucket to create, recommended: raft-suite"
- default     = "raft-suite"
+ description = "Google Cloud Storage Bucket to create; the given value is used as a prefix in the pattern {bucket_name}-raft-suite"
}

variable "dataset_id" {
