Connecting Auth0 to DynamoDb and CloudWatch

If your website uses Auth0 for user authentication, and you have subscribed to an Enterprise Plan (https://auth0.com/pricing), you can store your user data in your own database, rather than Auth0’s internal database. The connection between Auth0 and your database is done through a set of Node.js scripts that you supply. Auth0 provides template scripts for many databases, but not AWS DynamoDb.

What if you want to store your users in DynamoDb, and log events such as log-in and log-out to a daily CloudWatch log stream? Here is how you do it.

Log-in script

Auth0’s custom database connection requires 6 scripts to be defined: Login, Create, Verify, Change Password, Get User and Delete. Here is a script for Login. Once you get the picture, you can copy and adapt this script for the 5 others. Note that I have assumed a certain structure in my Dynamodb user table. If your structure (fields and global indices) is different, you will need to make some minor adjustments accordingly.

// Auth0 custom-database Login script.
// Validates (email, password) against a DynamoDB user table and records the
// attempt in a daily CloudWatch log stream.
// callback(err) follows the Node convention: err is null on success.
// NOTE(review): `configuration` is injected by the Auth0 runtime — values are
// defined under the connection's Settings (see the Settings section below).
function login(email, password, callback) {

var AWS = require('aws-sdk');
var crypto = require('crypto');

// Connection settings supplied via the Auth0 `configuration` object.
var region = configuration.region;
var accessKeyId = configuration.accessKeyId;
var secretAccessKey = configuration.secretAccessKey;
var groupName = configuration.groupName;               // CloudWatch log group name
var userTableName = configuration.userTableName;       // DynamoDB user table name
var salt = configuration.salt;                         // secret salt for password hashing
var streamNamePrefix = configuration.streamNamePrefix; // CloudWatch stream name prefix

// Buffered log events; flushed to CloudWatch by emit() at the end of the script.
var logEvents = [];


var cloudwatchlogs = new AWS.CloudWatchLogs({
    apiVersion: '2014-03-28',
    region: region,
    accessKeyId: accessKeyId,
    secretAccessKey: secretAccessKey
});

var dynamodb = new AWS.DynamoDB({
    apiVersion: '2012-08-10',
    region: region,
    accessKeyId: accessKeyId,
    secretAccessKey: secretAccessKey
});

// Left-pad the decimal representation of num with '0' until it is at least
// `places` characters long. Values already that wide are returned unchanged.
function zeroPad(num, places) {
    var text = String(num);
    while (text.length < places) {
        text = "0" + text;
    }
    return text;
}

// Format today's local date as 'YYYY-MM-DD' for the daily log stream name.
// Fixes: the original used getMonth() directly — it is zero-based (January
// is 0), so every stream name was a month behind (and January showed '00').
// Also replaces the deprecated getYear() + 1900 idiom with getFullYear().
function todayAsString() {
    var now = new Date();
    return zeroPad(now.getFullYear(), 4) +
        '-' + zeroPad(now.getMonth() + 1, 2) + // +1: calendar month
        '-' + zeroPad(now.getDate(), 2);
}

// One log stream per day, e.g. 'myprefix/2024-05-01'.
var streamName = streamNamePrefix + '/' + todayAsString();

// Create the CloudWatch log group. cb(err, data) receives the raw AWS
// response; an error is reported if the group already exists.
function makeLogGroup(cb) {
    var params = {
        logGroupName: groupName
    };
    cloudwatchlogs.createLogGroup(params, cb);
}

// Create today's log stream inside the group. cb(err, data) receives the
// raw AWS response; an error is reported if the stream already exists.
function makeLogStream(cb) {
    var params = {
        logGroupName: groupName,
        logStreamName: streamName
    };
    cloudwatchlogs.createLogStream(params, cb);
}

// Ensure the log group exists, then describe today's stream within it.
// On success, cb(null, data) receives the DescribeLogStreams response
// (data.logStreams may be empty); on failure, cb(err).
// Fixes: the original had a syntax error ('< =' instead of '<='), which made
// the whole script unparseable, and treated a concurrent-create race
// (ResourceAlreadyExistsException) as fatal.
function createLogGroupIfNotExists(cb) {
    var tryCount = 0;
    var maxTries = 3;

    function tryOnce() {
        cloudwatchlogs.describeLogStreams({
                logGroupName: groupName,
                logStreamNamePrefix: streamName
            },
            function(err, data) {
                if (!err) {
                    cb(null, data);
                    return;
                }
                // Group missing: create it, then re-describe (bounded retries).
                if (err.code === 'ResourceNotFoundException' && tryCount++ < maxTries) {
                    makeLogGroup(function(gerr) {
                        if (gerr && gerr.code !== 'ResourceAlreadyExistsException') {
                            cb(gerr);
                        } else {
                            // Created (or another caller beat us to it) — re-describe.
                            tryOnce();
                        }
                    });
                } else {
                    cb(err);
                }
            });
    }

    tryOnce();
}

// Ensure today's log stream exists and return its metadata:
// cb(null, streamMetaData) — including uploadSequenceToken — or cb(err).
// Fixes: the original retry condition was inverted — on a createLogStream
// error with retries remaining it gave up immediately (cb(lerr)), and once
// retries were exhausted it looped forever (tryOnce()). Now it retries on
// transient errors up to maxTries and then reports the error; an
// already-exists race is treated as success.
function createLogStreamIfNotExists(cb) {
    var tryCount = 0;
    var maxTries = 3;

    function tryOnce() {
        createLogGroupIfNotExists(function(err, data) {
            if (err) {
                cb(err);
                return;
            }
            if (data.logStreams.length > 0) {
                // Stream found — hand back its metadata (sequence token etc.).
                cb(null, data.logStreams[0]);
                return;
            }
            makeLogStream(function(lerr) {
                if (lerr && lerr.code !== 'ResourceAlreadyExistsException') {
                    if (tryCount++ < maxTries) {
                        tryOnce(); // transient failure — retry
                    } else {
                        cb(lerr); // retries exhausted — give up
                    }
                } else {
                    // Created (or raced into existence) — re-describe to
                    // pick up the stream metadata.
                    tryOnce();
                }
            });
        });
    }

    tryOnce();
}

// Normalize a record (string or object) into the CloudWatch PutLogEvents
// shape: { message, timestamp }. Objects with a `time` property use it as
// the event timestamp; everything else is stamped with the current time.
function createLogEvent(rec) {
    var message;
    if (typeof rec === 'string') {
        message = rec;
    } else {
        message = JSON.stringify(rec);
    }
    var timestamp = Date.now();
    if (typeof rec === 'object' && rec.time) {
        timestamp = new Date(rec.time).getTime();
    }
    return {
        message: message,
        timestamp: timestamp
    };
}

// Buffer an event locally; nothing is sent to CloudWatch until emit() runs.
function cloudLog(event) {
    var entry = createLogEvent(event);
    logEvents.push(entry);
}

// Flush all buffered logEvents to CloudWatch in a single PutLogEvents call,
// ensuring the log group and today's stream exist first. cb(err, data) is
// optional and receives the PutLogEvents response (or the failure).
function emit(cb) {
    // Invoke cb only when the caller supplied one.
    function doCallBack(err, data) {
        if (typeof cb === 'function') {
            cb(err, data);
        }
    }

    if (logEvents.length) {
        createLogStreamIfNotExists(function(err, streamMetaData) {
            // streamMetaData is read before the err check; on error it is
            // undefined, so nextToken safely falls back to null here.
            var nextToken = streamMetaData ? streamMetaData.uploadSequenceToken : null;
            if (err) {
                if (typeof cb === 'function') {
                    cb(err);
                }
            } else {
                var params = {
                    logEvents: logEvents,
                    logGroupName: groupName,
                    logStreamName: streamName
                };
                // Buffer is cleared before the put completes.
                // NOTE(review): if putLogEvents then fails, these events are
                // dropped — acceptable for best-effort logging.
                logEvents = [];
                if (nextToken) {
                    // Streams that already have events require the last
                    // uploadSequenceToken on the next put.
                    params.sequenceToken = nextToken;
                }
                cloudwatchlogs.putLogEvents(params, function(err, data) {
                    doCallBack(err, data);
                });
            }
        });
    } else {
        // Nothing buffered — still fire the callback so callers proceed.
        doCallBack();
    }
}

// Look up a user record by email via the 'email-index' global secondary
// index. cb(err, data) receives the raw DynamoDB Query response; matching
// items carry only user-id, email, nick and phash.
function getUserByEmail(email, cb) {
    // 'user-id' contains a hyphen, so it must go through an attribute-name alias.
    var params = {
        TableName: userTableName,
        IndexName: 'email-index',
        ExpressionAttributeNames: {
            "#u": "user-id"
        },
        ExpressionAttributeValues: {
            ":v1": {
                S: email
            }
        },
        KeyConditionExpression: 'email = :v1',
        ProjectionExpression: '#u,email,nick,phash'
    };
    dynamodb.query(params, cb);
}

// One-way SHA-256 digest of the input, encoded as base64.
function hashString(datum) {
    return crypto.createHash('sha256').update(datum).digest('base64');
}

// Salted password hash, bound to the user id so identical passwords yield
// different hashes for different users.
function hashPassword(user_id, given_password) {
    var material = [user_id, salt, given_password].join('|');
    return hashString(material);
}

// True when the supplied password hashes to the stored phash.
// NOTE(review): '===' is not a constant-time comparison; consider
// crypto.timingSafeEqual if timing attacks are a concern.
function pass(user_id, given_password, phash) {
    var candidate = hashPassword(user_id, given_password);
    return candidate === phash;
}

// Verify (email, password) against DynamoDB. cb(err, user) — user is the
// unpacked record on success, or null when the user is unknown or the
// password is wrong.
// Fixes: the original tested 'data.Items.length >= 0', which is always true,
// so an unknown email address crashed on 'data.Items[0]' being undefined.
function testCredentials(email, password, cb) {
    getUserByEmail(email, function(err, data) {
        var user = null;
        if (!err && data.Items.length > 0) {
            var item = data.Items[0];
            user = {
                user_id: item['user-id'].S,
                nickname: item.nick.S,
                email: item.email.S,
                phash: item.phash.S
            };
            // Exact email match plus password check; otherwise treat as a miss.
            if (user.email !== email || !pass(user.user_id, password, user.phash)) {
                user = null;
            }
        }
        cb(err, user);
    });
}

    // ---- Main flow ----
    // Log entry, test the credentials, log the outcome, flush the log
    // buffer, then hand the result back to Auth0 via callback(err).
    cloudLog({method:'login',control:'ENTER'});
    testCredentials(email, password, function(err, user) {
        if (err) {
            cloudLog(err);

        } else if (user) {
            cloudLog({
                method: 'login',
                pass: 'true',
                email: email,
                user_id: user.user_id
            });

        } else {
            cloudLog({
                method: 'login',
                pass: 'false',
                email: email
            });
        }
        cloudLog({method:'login',control:'EXIT'});
        // Flush buffered events; emit errors are ignored — only the
        // credential-check err is reported to Auth0.
        // NOTE(review): callback is never given a user profile, and a wrong
        // password also yields callback(null) here; Auth0 login scripts
        // normally return callback(null, profile) on success and a
        // WrongUsernameOrPasswordError on failure — verify against your
        // tenant's requirements.
        emit(function(emit_err, datum) {
            callback(err);
        });
    });
}

Settings

In the settings, you will need to define the following configuration items:

  • region AWS region code for both the dynamodb table and cloudwatch logs.
  • accessKeyId IAM Access key for AWS operations. See the section on User Policies below.
  • secretAccessKey Goes with accessKeyId.
  • groupName The CloudWatch Log Group name. The group will be created if it does not exist.
  • userTableName The DynamoDb table name.
  • salt Just some random secret string to salt the passwords.
  • streamNamePrefix Prefix for the CloudWatch log stream name.

IAM User Policies

You will need to assign at least the following policies (after substitution of place-markers) to the user whose credentials were passed in the Auth0 custom database connection settings above.

{
    "Version": "2012-10-17",
    "Statement": [
        {
            "Effect": "Allow",
            "Action": "logs:CreateLogGroup",
            "Resource": "arn:aws:logs:<#region>:<#account>:*"
        },
        {
            "Effect": "Allow",
            "Action": [
                "logs:DescribeLogStreams",
                "logs:CreateLogStream",
                "logs:PutLogEvents"
            ],
            "Resource": [
                "arn:aws:logs:<#region>:<#account>:log-group:<#group>:*"
            ]
        },
        {
            "Effect": "Allow",
            "Action": [
                "dynamodb:DeleteItem",
                "dynamodb:GetItem",
                "dynamodb:PutItem",
                "dynamodb:Scan",
                "dynamodb:UpdateItem"
            ],
            "Resource": "arn:aws:dynamodb:<#region>:<#account>:table/<#table>"
        },
        {
            "Effect": "Allow",
            "Action": [
                "dynamodb:Query"
            ],
            "Resource": "arn:aws:dynamodb:<#region>:<#account>:table/<#table>/index/<#index>"
        }
    ]
}

… where the following place-markers are substituted for your particular values:

  • <#region> The AWS region code for dynamodb and cloudwatch. Eg. ap-southeast-2 .
  • <#account> Your AWS account number/identifier.
  • <#group> The name of the CloudWatch log group.
  • <#table> The name of the DynamoDb user table.
  • <#index> The name of the global index used to look-up the user table by email address. In the supplied code fragment, this name is ’email-index’. Change as you require.

Dynamodb schema

I have assumed that the user table has schema that follows this pattern of item:

{
  "email": "sean@seanbdurkin.id.au",
  "email_verified": true,
  "nick": "Sean",
  "phash": "<#redacted>",
  "user-id": "sean"
}

where the primary key is user-id. You will also need a global index to look-up users based on email. Probably you should also add fields for username and user_metadata.

A note about logging

The CloudWatch log stream name will be ‘<#streamNamePrefix>/<#Date>‘, where <#streamNamePrefix> is as given by the settings, and <#Date> is today’s date. There is an assumption that no other log stream’s name merely begins with ‘<#streamNamePrefix>/<#Date>‘ without being exactly that name. So when we search for streams with this prefix, we get either zero streams, or exactly one stream — the stream we want. If this assumption is not going to hold in your architecture, adjust the code accordingly.

What about the other 5 scripts?

You can develop them yourself. Once you see how the login script is made, it’s just a case of cut and paste, with some obvious modification.

After-thoughts

The custom database connection is only available on the expensive Enterprise plan, or on a 30 day trial. If it was on the free plan, I would use Auth0 for my amateur and Start-Up projects. It is not good keeping your user table in a foreign database, because you can’t join it with other tables.

This entry was posted in Web hosting. Bookmark the permalink.