type: PIN
Consumer key: 3nVuSoBZnx6U4vzUxf5w
Consumer secret: Bcs59EFbbsdF6Sl9Ng71smgStWEGwXXKSjYvPVt7qys
type: PIN
Consumer key: IQKbtAYlXLripLGPWd0HUA
DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
Version 2, December 2004
Copyright (C) 2011 Jed Schmidt <http://jed.is>
Everyone is permitted to copy and distribute verbatim or modified
copies of this license document, and changing it is allowed as long
as the name is changed.
DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
You may have thought of running Nightmare on AWS Lambda. But before we can run it on Lambda, we first need to make it run on Amazon Linux.
According to the AWS documentation on the Lambda execution environment and available libraries, we need the AMI image with the alias amzn-ami-hvm-2016.03.3.x86_64-gp2. Keep in mind that the AMI image ID for this instance differs between regions, e.g.:
eu-west-1
- ami-f9dd458a
us-east-1
- ami-6869aa05
You may have thought of running Nightmare on AWS Lambda. But before we can run it on Lambda, we first need to make it run on Amazon Linux.
According to the AWS documentation on the Lambda execution environment and available libraries, we need the AMI image with the alias amzn-ami-hvm-2016.03.3.x86_64-gp2. Keep in mind that the AMI image ID for this instance differs between regions, e.g.:
eu-west-1
- ami-f9dd458a
us-east-1
- ami-6869aa05
#Sample circle.yml for deploying a rails app to deis | |
machine: | |
pre: | |
# install the deis cli | |
- curl -sSL http://deis.io/deis-cli/install-v2.sh | bash | |
- sudo mv $PWD/deis /usr/local/bin/deis | |
deployment: | |
staging: | |
branch: master | |
commands: |
# You don't need Fog in Ruby or some other library to upload to S3 -- shell works perfectly fine | |
# This is how I upload my new Sol Trader builds (http://soltrader.net) | |
# Based on a modified script from here: http://tmont.com/blargh/2014/1/uploading-to-s3-in-bash | |
S3KEY="my aws key" | |
S3SECRET="my aws secret" # pass these in | |
function putS3 | |
{ | |
path=$1 |
// index.js | |
'use strict'; | |
var app = require('express')(); | |
var jsonParser = require('body-parser').json({limit: '1mb'}); | |
var db = require('./routes/db.js'); | |
var doc = require('./routes/document.js'); | |
app.put('/:db', jsonParser, db.createDB); |
var async = require('async') | |
var request = require('request') | |
var urls = process.argv.slice(2) | |
console.log('urls', urls) | |
async.map(urls, get, function(err, results) { | |
if (err) return console.error(err) |
var http = require('http'); | |
var bl = require('bl'); | |
var urls = []; | |
process.argv.slice(2).forEach(function(item){ | |
urls.push(item) | |
}); | |
console.log (urls) |
This was done on a stock Ubuntu 14.04 cloud install, but it should work on any distro (although you'd need to tweak the package-installation step to use yum or another package manager, with the appropriate package names).
apt-get update && apt-get install -y curl build-essential libx11-dev libxext-dev libpcap-dev zlib1g-dev libgdbm-dev