Dailen Spencer (dailenspencer)

  • ClickTripz, LLC
  • Kauai, Hawaii
import React from 'react';
import {Route} from 'react-router-dom';
import TransitionGroup from 'react-transition-group/TransitionGroup';
import _ from 'lodash';
import HomePage from 'components/Pages/Home/home.page';
import ParallaxPage from 'components/Pages/Home/parallax.page';
import NavigationPage from 'components/Pages/Navigation/navigation.page';
import BlogListPage from 'components/Pages/Blog/blogList.page';
import React from "react";
import { Switch, Route } from "react-router-dom";
import asyncComponent from "../../helpers/AsyncFunc";
class AppRouter extends React.Component {
  render() {
    const { url } = this.props;
    return (
      <Switch>
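        {/* Illustrative sketch only: the actual routes from the original gist
            are not shown here. The "../Page/home" path and the pattern of
            asyncComponent wrapping a dynamic import are assumptions. */}
        <Route exact path={`${url}/`} component={asyncComponent(() => import("../Page/home"))} />
      </Switch>
    );
  }
}

export default AppRouter;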
# -*- coding: utf-8 -*-
import scrapy
import re
from scrapy import Request
import csv
import os
# TODO: use scrapy-proxies so we don't get throttled or banned by Craigslist
# while sifting through URLs and extracting data
class JobsSpider(scrapy.Spider):
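    # Minimal sketch of the spider body; only the spider name is grounded in
    # this gist (it has to match the `scrapy crawl jobs` command used below).
    # The actual start URLs and extraction logic are not shown here.
    name = "jobs"

    def parse(self, response):
        # Placeholder for the job-listing extraction logic
        pass

# Regarding the TODO above: scrapy-proxies is enabled from settings.py rather
# than inside the spider. A minimal sketch following the scrapy-proxies README,
# with a placeholder proxy list path:
#
#   RETRY_TIMES = 10
#   RETRY_HTTP_CODES = [500, 503, 504, 400, 403, 404, 408]
#   DOWNLOADER_MIDDLEWARES = {
#       'scrapy.downloadermiddlewares.retry.RetryMiddleware': 90,
#       'scrapy_proxies.RandomProxy': 100,
#       'scrapy.downloadermiddlewares.httpproxy.HttpProxyMiddleware': 110,
#   }
#   PROXY_LIST = '/path/to/proxy/list.txt'
#   PROXY_MODE = 0  # 0 = pick a different proxy for every request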
#!/bin/bash
# -- OVERVIEW --
# This script handles execution of the Craigslist Jobs Scrapy crawler, which
# gathers job listing contents. We then upload the results to an S3 bucket.
# save file with timestamp prepended
timestamp=$(date +%Y-%m-%d_%H-%M-%S)
filename="$timestamp"_results.json
# execute scrapy and store results in json file
scrapy crawl jobs -o "$filename"
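# Upload the results to S3, as described in the overview. The upload command is
# not shown in this preview; this is a sketch using the AWS CLI, the bucket name
# is a placeholder, and the CLI is assumed to be installed and configured.
aws s3 cp "$filename" "s3://<results-bucket>/craigslist-jobs/$filename"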
dailenspencer / compress-media-folder.sh
Created May 9, 2018 01:12
Compress Media Folder Bash Script
#!/bin/bash
# OVERVIEW
# This bash script handles the organization and compression of blog media items within the app.
# When a directory is supplied to the script, it loops through the directory's items and handles
# the renaming and compression accordingly. Currently the script only handles one type of media
# item: images (.JPG, .jpg).
targetedFolder=$1
# Ensure argument is a valid directory
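# A minimal sketch of the check described above; the exact error handling in the
# original script is not shown, so this simply bails out with a usage message.
if [ ! -d "$targetedFolder" ]; then
  echo "Usage: $0 <media-directory>"
  exit 1
fi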