Merge pull request #197 from lmorchard/196-mozlog

Switch to mozlog for logging
This commit is contained in:
Barry Chen 2018-09-24 12:42:29 -05:00 коммит произвёл GitHub
Родитель 2372417913 05c6224c90
Коммит c5d23980ae
Не найден ключ, соответствующий данной подписи
Идентификатор ключа GPG: 4AEE18F83AFDEB23
13 изменённых файлов: 839 добавлений и 723 удалений

Просмотреть файл

@ -147,8 +147,9 @@ Read this Serverless Blog post for more details: https://serverless.com/blog/ser
When using `serverless deploy` to deploy the stack, you can use several environment variables to alter configuration. Note that many of these are currently overridden by a `serverless.local.yml` file, if present.
- `LOG_INFO` - set to "1" for informational log output, "0" to disable (default: "1")
- `LOG_DEBUG` - set to "1" for verbose debug log output, "0" to disable (default: "0")
- `LOG_LEVEL` - (default: "info") one of the following severity levels for log verbosity in increasing order: critical, error, warn, info, debug, verbose, trace
- `LOG_DEBUG` - (default: "0") set to "1" to debug logging itself
- `LOG_FORMAT` - (default: "heka") set to "pretty" for human-readable, "heka" for JSON format readable by Heka
- `STAGE` - Stage for building and deploying - one of `dev`, `stage`, or `production`
- `NODE_ENV` - Use `production` for a more optimized production build, `development` for a development build with more verbose logging and other conveniences
- `PREFIX` - a prefix string used in constructing the names of resources and functions, by default a combination of service and stage names

Просмотреть файл

@ -9,11 +9,19 @@ const documentClient = new AWS.DynamoDB.DocumentClient();
const { DEV_CREDENTIALS, DEFAULT_HAWK_ALGORITHM } = require("../lib/constants");
const Sentry = require("../lib/sentry");
const Metrics = require("../lib/metrics");
const { logDebug, logInfo, jsonPretty, md5 } = require("../lib/utils.js");
const { md5 } = require("../lib/utils.js");
const REQUIRED_FIELDS = ["image", "negative_uri", "positive_uri"];
module.exports.post = async function(event, context) {
const log = require("../lib/logging")({
name: "accept",
isRequest: true,
event,
context,
});
log.info("summary");
const {
UPSTREAM_SERVICE_URL,
QUEUE_NAME: QueueName,
@ -21,15 +29,11 @@ module.exports.post = async function(event, context) {
} = process.env;
const Raven = Sentry();
logDebug(
"env",
jsonPretty({
UPSTREAM_SERVICE_URL,
QueueName,
Bucket,
})
);
log.verbose("env", {
UPSTREAM_SERVICE_URL,
QueueName,
Bucket,
});
const {
headers,
@ -37,8 +41,7 @@ module.exports.post = async function(event, context) {
requestContext: { path, requestId },
} = event;
logInfo("Accepting job", requestId);
logDebug("event", jsonPretty({ headers, params, path, requestId }));
log.verbose("event", { headers, params, path, requestId });
const {
Host: host,
@ -46,8 +49,6 @@ module.exports.post = async function(event, context) {
"X-Forwarded-Port": port = 80,
} = headers;
logDebug("headers", jsonPretty({ host, authorization, port }));
let authArtifacts;
try {
({ artifacts: authArtifacts } = await Hawk.server.authenticate(
@ -61,9 +62,10 @@ module.exports.post = async function(event, context) {
},
lookupCredentials
));
logDebug("auth", jsonPretty({ authArtifacts }));
log.commonFields.uid = authArtifacts.id;
} catch (err) {
Raven.captureException(err);
log.error("authInvalid", { authorization });
return response(
401,
{ error: err.message },
@ -76,27 +78,25 @@ module.exports.post = async function(event, context) {
body = await parseRequestBody(event);
REQUIRED_FIELDS.forEach(name => {
if (!body[name]) {
log.warn("requestInvalid", { field: name });
throw { message: `Required "${name}" is missing` };
}
});
// TODO: More input validation here?
({ negative_uri, positive_uri, positive_email, notes, image } = body);
logDebug(
"body",
jsonPretty({
negative_uri,
positive_uri,
positive_email,
notes,
image: {
filename: image.filename,
contentEncoding: image.contentEncoding,
contentType: image.contentType,
dataMD5: md5(image.data || ""),
},
})
);
log.debug("body", {
negative_uri,
positive_uri,
positive_email,
notes,
image: {
filename: image.filename,
contentEncoding: image.contentEncoding,
contentType: image.contentType,
dataMD5: md5(image.data || ""),
},
});
} catch (err) {
Raven.captureException(err);
return response(400, { error: err.message });
@ -104,8 +104,6 @@ module.exports.post = async function(event, context) {
const imageKey = `image-${requestId}`;
logDebug("imageKey", imageKey);
const upstreamServiceUrl =
UPSTREAM_SERVICE_URL !== "__MOCK__"
? UPSTREAM_SERVICE_URL
@ -115,8 +113,6 @@ module.exports.post = async function(event, context) {
event.requestContext.stage +
"/mock/upstream";
logDebug("upstreamServiceUrl", upstreamServiceUrl);
const messageData = {
datestamp: new Date().toISOString(),
upstreamServiceUrl,
@ -128,9 +124,9 @@ module.exports.post = async function(event, context) {
notes,
image: imageKey,
};
const MessageBody = JSON.stringify(messageData);
log.verbose("enqueue", messageData);
logDebug("MessageBody", MessageBody);
const MessageBody = JSON.stringify(messageData);
const imagePutResult = await S3.putObject({
Bucket,
@ -139,7 +135,7 @@ module.exports.post = async function(event, context) {
Body: image.data,
}).promise();
logDebug("imagePutResult", jsonPretty(imagePutResult));
log.verbose("imagePutResult", { imagePutResult });
const requestPutResult = await S3.putObject({
Bucket,
@ -148,7 +144,7 @@ module.exports.post = async function(event, context) {
Body: MessageBody,
}).promise();
logDebug("requestPutResult", jsonPretty(requestPutResult));
log.verbose("requestPutResult", { requestPutResult });
const { QueueUrl } = await SQS.getQueueUrl({ QueueName }).promise();
const queueSendResult = await SQS.sendMessage({
@ -156,13 +152,7 @@ module.exports.post = async function(event, context) {
MessageBody,
}).promise();
logDebug(
"queueSendResult",
jsonPretty({
QueueUrl,
queueSendResult,
})
);
log.verbose("queueSendResult", { QueueUrl, queueSendResult });
const metricsResult = await Metrics.newItem({
consumer_name: authArtifacts.id,
@ -170,7 +160,7 @@ module.exports.post = async function(event, context) {
type: image.contentType,
});
logDebug("metricsResult", jsonPretty(metricsResult));
log.verbose("metricsResult", { metricsResult });
const responseData = {
id: requestId,
@ -178,9 +168,7 @@ module.exports.post = async function(event, context) {
positive_uri,
positive_email,
};
logDebug("responseData", jsonPretty(responseData));
log.info("response", responseData);
return response(201, responseData);
};

Просмотреть файл

@ -8,6 +8,7 @@ describe("functions/heartbeat.handler", () => {
const result = await heartbeat.handler({
path: "/dev/__heartbeat__",
httpMethod: "GET",
headers: {},
});
expect(result.statusCode).to.equal(200);
expect(JSON.parse(result.body)).to.deep.equal({ status: "OK" });

Просмотреть файл

@ -1,6 +1,13 @@
"use strict";
module.exports.handler = async function(event, context) {
module.exports.handler = async function(event = {}, context = {}) {
const log = require("../lib/logging")({
name: "heartbeat",
isRequest: true,
event,
context,
});
log.info("summary");
return {
statusCode: 200,
headers: { "Content-Type": "application/json" },

Просмотреть файл

@ -4,9 +4,16 @@ const AWS = require("aws-sdk");
const SQS = new AWS.SQS({ apiVersion: "2012-11-05" });
const Sentry = require("../lib/sentry");
const Metrics = require("../lib/metrics");
const { logDebug, logInfo, jsonPretty, wait } = require("../lib/utils.js");
const { wait } = require("../lib/utils.js");
module.exports.handler = async function(event = {}, context = {}) {
const log = require("../lib/logging")({
name: "periodicMetrics",
event,
context,
});
log.info("summary");
module.exports.handler = async function(event, context) {
const Raven = Sentry();
const { DEFAULT_METRICS_PING_PERIOD } = require("../lib/constants");
@ -17,28 +24,26 @@ module.exports.handler = async function(event, context) {
parseInt(METRICS_PING_PERIOD, 10) || DEFAULT_METRICS_PING_PERIOD;
let pingCount = 0;
logInfo("Periodic metrics monitor start");
log.debug("start");
while (context.getRemainingTimeInMillis() > pingPeriod + 1000) {
try {
await sendHeartbeatMetrics(process.env, context);
await sendHeartbeatMetrics(log, process.env, context);
pingCount++;
} catch (err) {
Raven.captureException(err);
logInfo("Failed to send periodic metrics", err);
log.error("error", { err });
}
logDebug(
"Pausing for",
log.verbose("pause", {
pingPeriod,
"ms",
context.getRemainingTimeInMillis(),
"ms remaining"
);
remaining: context.getRemainingTimeInMillis(),
});
await wait(pingPeriod);
}
logInfo(`Periodic metrics monitor exit, pingCount=${pingCount}`);
log.verbose("exit", { pingCount });
};
const sendHeartbeatMetrics = async (
log,
{ QUEUE_NAME },
{ awsRequestId: poller_id }
) => {
@ -55,7 +60,9 @@ const sendHeartbeatMetrics = async (
],
}).promise();
const apiEndTime = Date.now();
logDebug("SQS.getQueueAttributes duration", apiEndTime - apiStartTime, "ms");
log.debug("getQueueAttributesDuration", {
duration: apiEndTime - apiStartTime,
});
const {
ApproximateNumberOfMessages,
@ -69,6 +76,6 @@ const sendHeartbeatMetrics = async (
items_in_progress: parseInt(ApproximateNumberOfMessagesNotVisible, 10),
items_in_waiting: parseInt(ApproximateNumberOfMessagesDelayed, 10),
};
logDebug("pingData", jsonPretty(pingData));
log.debug("pingData", { pingData });
return Metrics.pollerHeartbeat(pingData);
};

Просмотреть файл

@ -8,22 +8,24 @@ const request = require("request-promise-native");
const { RATE_LIMIT, RATE_PERIOD, RATE_WAIT } = require("../lib/constants");
const Sentry = require("../lib/sentry");
const Metrics = require("../lib/metrics");
const {
logDebug,
logInfo,
jsonPretty,
wait,
epochNow,
} = require("../lib/utils.js");
const { wait, epochNow } = require("../lib/utils.js");
exports.handler = async function(event = {}, context = {}) {
const { Records } = event;
const log = require("../lib/logging")({
name: "processQueueItem",
event,
context,
});
log.info("summary", { recordCount: Records.length });
exports.handler = async function({ Records }, context) {
logInfo("Received", Records.length, "messages to process");
const results = [];
for (let idx = 0; idx < Records.length; idx++) {
const result = await exports.handleOne(Records[idx], context);
results.push(result);
}
logInfo("Finished processing batch of", results.length, "messages");
log.debug("done", { resultCount: results.length });
return results;
};
@ -68,7 +70,17 @@ Response JSON:
${responseUrl}
`;
exports.handleOne = async function({ receiptHandle, body }, { awsRequestId }) {
exports.handleOne = async function(event, context) {
const log = require("../lib/logging")({
name: "processQueueItem.worker",
event,
context,
});
log.info("summary");
const { body } = event;
const { awsRequestId } = context;
const {
HITRATE_TABLE,
CONTENT_BUCKET: Bucket,
@ -80,19 +92,16 @@ exports.handleOne = async function({ receiptHandle, body }, { awsRequestId }) {
const Raven = Sentry();
logDebug(
"env",
jsonPretty({
HITRATE_TABLE,
Bucket,
EMAIL_FROM,
EMAIL_TO,
EMAIL_EXPIRES,
})
);
log.verbose("env", {
HITRATE_TABLE,
Bucket,
EMAIL_FROM,
EMAIL_TO,
EMAIL_EXPIRES,
});
const parsedBody = JSON.parse(body);
logDebug("parsedBody", jsonPretty(parsedBody));
log.verbose("parsedBody", { parsedBody });
const {
datestamp,
@ -121,7 +130,7 @@ exports.handleOne = async function({ receiptHandle, body }, { awsRequestId }) {
timing_submitted: null,
};
logInfo("Processing queue item", id);
log.info("processing", { id });
try {
// Pause if we're at the rate limit for current expiration window
@ -135,10 +144,10 @@ exports.handleOne = async function({ receiptHandle, body }, { awsRequestId }) {
})
.promise();
logDebug("hitRateData", jsonPretty(data));
log.verbose("hitRateData", { data });
if (data.Count >= RATE_LIMIT) {
logInfo("Pausing for rate limit", epochNow());
log.info("pausing");
rateLimited = true;
await wait(RATE_WAIT);
} else {
@ -158,7 +167,7 @@ exports.handleOne = async function({ receiptHandle, body }, { awsRequestId }) {
})
.promise();
logDebug("hitRatePutResult", jsonPretty(hitRatePutResult));
log.verbose("hitRatePutResult", { hitRatePutResult });
const imageUrl = S3.getSignedUrl("getObject", {
Bucket,
@ -166,7 +175,7 @@ exports.handleOne = async function({ receiptHandle, body }, { awsRequestId }) {
Expires: 600, // 10 minutes
});
logDebug("imageUrl", imageUrl);
log.verbose("imageUrl", { imageUrl });
metricsPing.timing_sent = Date.now() - Date.parse(datestamp);
@ -186,7 +195,7 @@ exports.handleOne = async function({ receiptHandle, body }, { awsRequestId }) {
metricsPing.timing_received = Date.now() - timingReceivedStart;
metricsPing.photodna_tracking_id = upstreamServiceResponse.TrackingId;
logDebug("upstreamServiceResponse", jsonPretty(upstreamServiceResponse));
log.verbose("upstreamServiceResponse", { upstreamServiceResponse });
const { IsMatch } = upstreamServiceResponse;
metricsPing.is_match = IsMatch;
@ -197,7 +206,7 @@ exports.handleOne = async function({ receiptHandle, body }, { awsRequestId }) {
S3.deleteObject({ Bucket, Key: `${image}` }).promise(),
S3.deleteObject({ Bucket, Key: `${image}-request.json` }).promise(),
]);
logDebug("deleteResult", jsonPretty(deleteResult));
log.verbose("deleteResult", { deleteResult });
} else {
// On positive match, store the details of the match response.
const putResult = await S3.putObject({
@ -216,7 +225,7 @@ exports.handleOne = async function({ receiptHandle, body }, { awsRequestId }) {
}),
}).promise();
logDebug("putResult", jsonPretty(putResult));
log.verbose("putResult", { putResult });
// Send an email alert on positive match, if addresses are available.
const ToAddresses = [];
@ -276,11 +285,11 @@ exports.handleOne = async function({ receiptHandle, body }, { awsRequestId }) {
},
},
};
logDebug("emailParams", jsonPretty(emailParams));
log.verbose("emailParams", { emailParams });
const emailResult = await SES.sendEmail(emailParams).promise();
logDebug("emailResult", jsonPretty(emailResult));
logInfo(`Sent notification email (${emailResult.MessageId})`);
log.verbose("emailResult", { emailResult });
log.info("sentEmail", { messageId: emailResult.MessageId });
}
}
@ -304,15 +313,15 @@ exports.handleOne = async function({ receiptHandle, body }, { awsRequestId }) {
},
});
metricsPing.timing_submitted = Date.now() - timingSubmittedStart;
logDebug("callbackResult", jsonPretty(callbackResult));
log.verbose("callbackResult", { callbackResult });
} catch (err) {
Raven.captureException(err);
metricsPing.is_error = true;
logInfo("REQUEST ERROR", err);
log.error("callbackError", { err });
throw err;
}
const metricsResult = await Metrics.workerWorks(metricsPing);
logDebug("metricsResult", jsonPretty(metricsResult));
log.verbose("metricsResult", { metricsResult });
return id;
};

Просмотреть файл

@ -11,6 +11,7 @@ describe("functions/version.handler", () => {
const result = await version.handler({
path: "/dev/__version__",
httpMethod: "GET",
headers: {},
});
expect(result.statusCode).to.equal(200);
expect(JSON.parse(result.body)).to.deep.equal({

Просмотреть файл

@ -1,7 +1,14 @@
"use strict";
const packageMeta = require("../package.json");
module.exports.handler = async function(event, context) {
module.exports.handler = async function(event = {}, context = {}) {
const log = require("../lib/logging")({
name: "version",
isRequest: true,
event,
context,
});
log.info("summary");
const { GIT_COMMIT: commit = "" } = process.env;
return {
statusCode: 200,

80
lib/logging.js Normal file
Просмотреть файл

@ -0,0 +1,80 @@
// Configure logging and wrap mozlog methods in decorators that automatically
// include function context and event information
module.exports = ({ name, event, context, isRequest = false }) => {
const {
LOG_LEVEL = "info",
LOG_FORMAT = "heka",
LOG_DEBUG = "0",
GIT_COMMIT = "",
} = process.env;
const mozlog = require("mozlog")({
app: "watchdog-proxy",
level: LOG_LEVEL,
fmt: LOG_FORMAT,
debug: LOG_DEBUG === "1",
});
const log = mozlog(name);
const selector = isRequest ? selectRequest : selectBase;
const out = {};
out.commonFields = {
version: GIT_COMMIT,
};
LOG_LEVELS.forEach(
level =>
(out[level] = (op, fields = {}) =>
log[level](
op,
selector({ event, context, fields, commonFields: out.commonFields })
))
);
return out;
};
// All mozlog severity levels, ordered least to most severe. A wrapper
// method is generated on each logger instance for every level listed here.
const LOG_LEVELS = [
  "trace",
  "verbose",
  "debug",
  "info",
  "warn",
  "error",
  "critical",
];
// Field-merge helpers that assemble the structured payload attached to every
// log message. commonFields is the mutable per-logger bag (version, uid, ...)
// that callers may extend after the logger is created.

// Fields for request-style (API Gateway) loggers: HTTP request summary plus
// the base Lambda-context fields. commonFields must be forwarded to
// selectBase — otherwise per-logger fields (uid, version) are silently
// dropped from every request log line.
const selectRequest = ({ event, context, fields = {}, commonFields = {} }) =>
  Object.assign(
    selectRequestEvent(event),
    selectBase({ context, fields, commonFields })
  );

// Base fields for all loggers: timestamp, Lambda context, the logger's
// common fields, then call-site fields. Later sources win on key clashes,
// so call-site fields override commonFields.
const selectBase = ({ context, fields = {}, commonFields = {} }) =>
  Object.assign(
    { timestamp: Date.now() },
    selectContext(context),
    commonFields,
    fields
  );

// https://docs.aws.amazon.com/lambda/latest/dg/eventsources.html#eventsources-api-gateway-request
// Pick the interesting parts of an API Gateway proxy event. headers defaults
// to {} so events without headers don't crash the logger.
const selectRequestEvent = ({
  path,
  httpMethod: method,
  headers: { Host: hostname, "User-Agent": agent } = {},
}) => ({
  path,
  method,
  agent,
  hostname,
});

// https://docs.aws.amazon.com/lambda/latest/dg/nodejs-prog-model-context.html
// Pick the interesting parts of the Lambda invocation context.
const selectContext = ({
  awsRequestId,
  functionName,
  functionVersion,
  memoryLimitInMB,
}) => ({
  awsRequestId,
  functionName,
  functionVersion,
  memoryLimitInMB,
});

Просмотреть файл

@ -15,8 +15,7 @@ global.env = {
EMAIL_FROM: "lorchard@mozilla.com",
EMAIL_TO: "",
EMAIL_EXPIRES: 600,
LOG_DEBUG: "0",
LOG_INFO: "0",
LOG_LEVEL: process.env.LOG_LEVEL || "critical",
};
global.constants = {

Просмотреть файл

@ -1,20 +1,5 @@
const crypto = require("crypto");
const makeLog = (name, enable) => (...args) => {
if (!enable) {
return;
}
console.log(`[${name}]`, ...args);
};
const { LOG_INFO = "1", LOG_ERROR = "1", LOG_DEBUG = "0" } = process.env;
const logError = makeLog("error", LOG_ERROR === "1");
const logDebug = makeLog("debug", LOG_DEBUG === "1");
const logInfo = makeLog("info", LOG_INFO === "1");
const jsonPretty = data => JSON.stringify(data, null, " ");
const md5 = data =>
@ -28,10 +13,6 @@ const wait = delay => new Promise(resolve => setTimeout(resolve, delay));
const epochNow = () => Math.floor(Date.now() / 1000);
module.exports = {
makeLog,
logDebug,
logInfo,
logError,
jsonPretty,
md5,
wait,

1232
package-lock.json сгенерированный

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -67,6 +67,7 @@
"eslint-plugin-node": "7.0.1",
"hawk": "7.0.7",
"raven": "2.6.4",
"mozlog": "2.2.0",
"request": "2.88.0",
"request-promise-native": "1.0.5"
}