diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml
new file mode 100644
index 0000000..1b941bf
--- /dev/null
+++ b/.github/workflows/push.yml
@@ -0,0 +1,55 @@
+name: Continuous Integration
+on: push
+jobs:
+  unit-tests:
+    runs-on: '${{ matrix.os }}'
+    strategy:
+      matrix:
+        os:
+          - ubuntu-20.04
+        node-version:
+          - 12.x
+          - 14.x
+          - 16.x
+    steps:
+      - uses: actions/checkout@v2
+      - name: 'Install node.js ${{ matrix.node-version }}'
+        uses: actions/setup-node@v2
+        with:
+          node-version: '${{ matrix.node-version }}'
+      - name: Run unit tests
+        run: |
+          npm install
+          npm run ci
+  npm-publish:
+    needs: unit-tests
+    if: github.ref == 'refs/heads/master' && github.event_name == 'push'
+    runs-on: ubuntu-20.04
+    steps:
+      - uses: actions/checkout@v2
+      - name: Install Node.js
+        uses: actions/setup-node@v2
+        with:
+          node-version: 16.x
+      - name: Run semantic-release
+        env:
+          GH_TOKEN: ${{ secrets.GH_SEMANTIC_RELEASE_TOKEN }}
+          NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
+        run: |
+          if [[ -n "$GH_TOKEN" && -n "$NPM_TOKEN" ]]; then
+            curl "https://raw.githubusercontent.com/pelias/ci-tools/master/semantic-release.sh" | bash -
+          fi
+  build-docker-images:
+    # run this job if the unit tests passed and the npm-publish job was a success or was skipped
+    # note: github actions won't run a job if you don't call one of the status check functions, so `always()` is called since it evaluates to `true`
+    if: ${{ always() && needs.unit-tests.result == 'success' && (needs.npm-publish.result == 'success' || needs.npm-publish.result == 'skipped') }}
+    needs: [unit-tests, npm-publish]
+    runs-on: ubuntu-20.04
+    steps:
+      - uses: actions/checkout@v2
+      - name: Build Docker images
+        env:
+          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
+          DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
+        run: |
+          curl "https://raw.githubusercontent.com/pelias/ci-tools/master/build-docker-images.sh" | bash -
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..9d542fa
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,3 @@
+node_modules
+*.log
+*.pbf
\ No newline at end of file
diff --git a/.jshintignore b/.jshintignore
new file mode 100644
index 0000000..3c3629e
--- /dev/null
+++ b/.jshintignore
@@ -0,0 +1 @@
+node_modules
diff --git a/.jshintrc b/.jshintrc
new file mode 100644
index 0000000..e5c5749
--- /dev/null
+++ b/.jshintrc
@@ -0,0 +1,22 @@
+{
+  "node": true,
+  "curly": true,
+  "eqeqeq": true,
+  "esversion": 8,
+  "freeze": true,
+  "immed": true,
+  "indent": 2,
+  "latedef": false,
+  "newcap": true,
+  "noarg": true,
+  "noempty": true,
+  "nonbsp": true,
+  "nonew": true,
+  "plusplus": false,
+  "quotmark": "single",
+  "undef": true,
+  "unused": false,
+  "maxparams": 4,
+  "maxdepth": 4,
+  "maxlen": 140
+}
diff --git a/.npmrc b/.npmrc
new file mode 100644
index 0000000..43c97e7
--- /dev/null
+++ b/.npmrc
@@ -0,0 +1 @@
+package-lock=false
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..8313850
--- /dev/null
+++ b/README.md
@@ -0,0 +1,48 @@
+>This repository is part of the [Pelias](https://github.com/pelias/pelias)
+>project. Pelias is an open-source, open-data geocoder originally sponsored by
+>[Mapzen](https://www.mapzen.com/). Our official user documentation is
+>[here](https://github.com/pelias/documentation).
+
+# Pelias Elasticsearch database client
+
+This module provides
+an [Elasticsearch](https://www.elastic.co/products/elasticsearch) client integrated with [`pelias-logger`](https://github.com/pelias/logger).
+
+[![Greenkeeper badge](https://badges.greenkeeper.io/pelias/pelias-elasticsearch.svg)](https://greenkeeper.io/)
+
+## Install Dependencies
+
+
+```bash
+$ npm install
+```
+
+## Usage
+
+```javascript
+'use strict';
+
+const buildClient = require('pelias-elasticsearch');
+const config = require('pelias-config').generate();
+const esclient = buildClient(config);
+
+esclient.indices.exists({ index: config.schema.indexName }, (err, { body }) => {
+  console.log(`index ${config.schema.indexName} exists?: ${body}`);
+});
+```
+
+## Contributing
+
+Please fork and pull request against upstream master on a feature branch.
+
+Pretty please; provide unit tests and script fixtures in the `test` directory.
+
+### Running Unit Tests
+
+```bash
+$ npm test
+```
+
+### Continuous Integration
+
+CI tests every release against all currently supported Node.js versions.
diff --git a/index.js b/index.js
new file mode 100644
index 0000000..4249daf
--- /dev/null
+++ b/index.js
@@ -0,0 +1,5 @@
+if (process.env.NODE_ENV !== 'test') {
+  require('./src/configValidation').validate(require('pelias-config').generate());
+}
+
+module.exports = require('./src/client');
diff --git a/package.json b/package.json
new file mode 100644
index 0000000..b43130c
--- /dev/null
+++ b/package.json
@@ -0,0 +1,49 @@
+{
+  "name": "pelias-elasticsearch",
+  "version": "0.0.0-development",
+  "description": "Elasticsearch integration for Pelias",
+  "engines": {
+    "node": ">=10.0.0"
+  },
+  "main": "index.js",
+  "scripts": {
+    "test": "NODE_ENV=test node test/run.js | tap-spec",
+    "lint": "jshint .",
+    "validate": "npm ls",
+    "ci": "npm test"
+  },
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/pelias/pelias-elasticsearch"
+  },
+  "keywords": [
+    "pelias",
+    "elasticsearch",
+    "client"
+  ],
+  "license": "MIT",
+  "bugs": {
+    "url": "https://github.com/pelias/pelias-elasticsearch/issues"
+  },
+  "homepage": "https://github.com/pelias/pelias-elasticsearch",
+  "devDependencies": {
+    "intercept-stdout": "^0.1.2",
+    "precommit-hook": "^3.0.0",
+    "proxyquire": "^2.0.0",
+    "tap-spec": "^5.0.0",
+    "tape": "^5.0.0"
+  },
+  "dependencies": {
+    "@elastic/elasticsearch": "~7.17.0",
+    "pelias-config": "https://github.com/michaelkirk-pelias/config#mkirk/elastic8",
+    "pelias-logger": "^1.2.1"
+  },
+  "pre-commit": [
+    "lint",
+    "validate",
+    "test"
+  ],
+  "release": {
+    "success": []
+  }
+}
diff --git a/src/client.js b/src/client.js
new file mode 100644
index 0000000..90c3844
--- /dev/null
+++ b/src/client.js
@@ -0,0 +1,65 @@
+const elasticsearch = require('@elastic/elasticsearch');
+const peliasSettings = require('pelias-config').generate();
+const peliasLogger = require('pelias-logger');
+
+/**
+ * 
+ * @param {elasticsearch.Client} client
+ */
+function configureLogging(client) {
+  const logger = peliasLogger.get('es-client');
+  client.on('serialization', (err, meta) => {
+    if (err) {
+      logger.error('serializationError=', err.toString());
+    } else {
+      logger.debug('serialization OK');
+    }
+  });
+  client.on('request', (err, meta) => {
+    if (err) {
+      logger.error('requestError=', err.toString());
+    } else {
+      logger.debug('request OK');
+    }
+  });
+  client.on('deserialization', (err, meta) => {
+    if (err) {
+      logger.error('deserializationError=', err.toString());
+    } else {
+      logger.debug('deserialization OK');
+    }
+  });
+  client.on('response', (err, meta) => {
+    if (err) {
+      logger.error('responseError=', err.toString());
+    } else {
+      logger.debug('response OK');
+    }
+  });
+  client.on('sniff', (err, meta) => {
+    if (err) {
+      logger.error('sniffError=', err.toString());
+    } else {
+      logger.debug('sniff OK');
+    }
+  });
+  client.on('resurrect', (err, meta) => {
+    if (err) {
+      logger.error('resurrectError=', err.toString());
+    } else {
+      logger.debug('resurrect OK');
+    }
+  });
+}
+
+/**
+ * 
+ * @param {elasticsearch.ClientOptions} [clientOptions]
+ * @return {elasticsearch.Client}
+ */
+module.exports = function(clientOptions){
+  const client = new elasticsearch.Client( clientOptions || peliasSettings.esclient || {} );
+  configureLogging(client);
+  return client;
+};
+
diff --git a/src/configValidation.js b/src/configValidation.js
new file mode 100644
index 0000000..d5c2e2d
--- /dev/null
+++ b/src/configValidation.js
@@ -0,0 +1,27 @@
+'use strict';
+
+const Joi = require('@hapi/joi');
+const elasticsearch = require('@elastic/elasticsearch');
+
+// Schema Configuration
+// dbclient.statFrequency: populated by defaults if not overridden
+// esclient: object, validation performed by elasticsearch module
+const schema = Joi.object().keys({
+  esclient: Joi.object().required().keys({
+    nodes: Joi.array().items(Joi.string()).min(1),
+    node: Joi.string(),
+    requestTimeout: Joi.number().integer().min(0)
+  }).xor('node', 'nodes').unknown(true),
+  schema: Joi.object().required().keys({
+    indexName: Joi.string().required()
+  })
+}).unknown(true);
+
+module.exports = {
+  validate: function validate(config) {
+    const validate = schema.validate(config);
+    if (validate.error) {
+      throw new Error(validate.error.details[0].message);
+    }
+  }
+};
diff --git a/test/client.js b/test/client.js
new file mode 100644
index 0000000..2408c64
--- /dev/null
+++ b/test/client.js
@@ -0,0 +1,51 @@
+'use strict';
+const buildClient = require('../src/client');
+const intercept = require('intercept-stdout');
+
+module.exports.tests = {};
+
+module.exports.tests.build = function(test) {
+  test('building a new client should not error', function(t) {
+    const client = buildClient();
+    t.ok(client);
+    t.end()
+  });
+};
+
+module.exports.tests.logging = function(test) {
+  test('output is logged to pelias logger', async function(t) {
+    const client = buildClient({ node: "http://non-existant-host:1234" } );
+
+    let stdoutBuffer = '';
+    let stderrBuffer = '';
+
+    const unhook_intercept = intercept(
+      (stdout) => { stdoutBuffer += stdout; return '' },
+      (stderr) => { stderrBuffer += stderr; return '' }
+    );
+
+    try {
+      let result = await client.search({ index: 'non-existant-index' });
+      t.fail("should have failed");
+    } catch (err) {
+      t.ok(err);
+    } finally {
+      unhook_intercept();
+    }
+
+    t.match(stderrBuffer, /responseError=/);
+
+    t.end()
+  });
+};
+
+module.exports.all = function (tape, common) {
+
+  function test(name, testFunction) {
+    return tape('index: ' + name, testFunction);
+  }
+
+  for( var testCase in module.exports.tests ){
+    module.exports.tests[testCase](test, common);
+  }
+};
diff --git a/test/configValidation.js b/test/configValidation.js
new file mode 100644
index 0000000..463276d
--- /dev/null
+++ b/test/configValidation.js
@@ -0,0 +1,112 @@
+'use strict';
+const { validate } = require('../src/configValidation');
+const proxyquire = require('proxyquire').noCallThru();
+
+module.exports.tests = {};
+
+module.exports.tests.build = function(test) {
+  test('valid `nodes` config', function(t) {
+    let config = {
+      esclient: {
+        nodes: ["http://my-host:123"]
+      },
+      schema: {
+        indexName: "example_index"
+      }
+    };
+
+    t.doesNotThrow(() => {
+      proxyquire('../src/configValidation', {
+        '@elastic/elasticsearch': {
+          Client: function() {
+            return { indices: { exists: (indexName, cb) => { cb(false, { body: true }); } } };
+          }
+        }
+      }).validate(config);
+    }, 'no error should have been thrown');
+
+    t.end()
+  });
+
+  test('valid `node` config', function(t) {
+    let config = {
+      esclient: {
+        node: "http://my-host:123"
+      },
+      schema: {
+        indexName: "example_index"
+      }
+    };
+
+    t.doesNotThrow(() => {
+      proxyquire('../src/configValidation', {
+        '@elastic/elasticsearch': {
+          Client: function() {
+            return { indices: { exists: (indexName, cb) => { cb(false, { body: true }); } } };
+          }
+        }
+      }).validate(config);
+    }, 'no error should have been thrown');
+
+    t.end()
+  });
+
+  test('cannot specify both `node` and `nodes`', function(t) {
+    let config = {
+      esclient: {
+        node: "http://my-host:123",
+        nodes: ["http://my-host:666"]
+      },
+      schema: {
+        indexName: "example_index"
+      }
+    };
+
+    t.throws(() => {
+      proxyquire('../src/configValidation', {
+        '@elastic/elasticsearch': {
+          Client: function() {
+            return { indices: { exists: (indexName, cb) => { cb(false, { body: true }); } } };
+          }
+        }
+      }).validate(config);
+    }, /exclusive.*node.*nodes/);
+
+    t.end()
+  });
+
+  test('empty nodes', function(t) {
+    let config = {
+      esclient: {
+        nodes: []
+      },
+      schema: {
+        indexName: "example_index"
+      }
+    };
+
+    t.throws(() => {
+      proxyquire('../src/configValidation', {
+        '@elastic/elasticsearch': {
+          Client: function() {
+            return { indices: { exists: (indexName, cb) => { cb(false, { body: true }); } } };
+          }
+        }
+      }).validate(config);
+    }, /nodes.*must contain at least 1/);
+
+    t.end()
+  });
+
+};
+
+module.exports.all = function (tape, common) {
+
+  function test(name, testFunction) {
+    return tape('index: ' + name, testFunction);
+  }
+
+  for( var testCase in module.exports.tests ){
+    module.exports.tests[testCase](test, common);
+  }
+}
diff --git a/test/run.js b/test/run.js
new file mode 100644
index 0000000..2447814
--- /dev/null
+++ b/test/run.js
@@ -0,0 +1,13 @@
+
+var tape = require('tape');
+var common = {};
+
+var tests = [
+  require('./client'),
+  require('./configValidation')
+  // other tests go here
+];
+
+tests.map(function(t) {
+  t.all(tape, common);
+});