This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
" Line-number setup: hybrid numbering that tracks focus and mode.
let g:python_host_prog='/usr/local/bin/python'
set nocompatible
filetype off
let mapleader=";"
set relativenumber number " relative line numbers (with absolute on cursor line)
" Drop to absolute numbering when the window loses focus.
au FocusLost * :set norelativenumber number
au FocusGained * :set relativenumber
" Absolute line numbers in insert mode, relative in normal mode.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# ---------------------------------------------- | |
# DenseCap | |
# Written by InnerPeace | |
# ---------------------------------------------- | |
"""read large region description json file""" | |
import ijson | |
import json | |
import sys |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import tensorflow as tf | |
class testCell(tf.nn.rnn_cell.RNNCell): | |
def __init__(self, input_size=1, state_size=1): | |
self.input_size = input_size | |
self._state_size = state_size | |
@property | |
def state_size(self): |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import matplotlib.pyplot as plt

# Two-panel figure demo: histogram on the left, free-form plot on the right.
# Assumes `data` (a 1-D sequence of numbers) is defined by the surrounding
# script -- TODO confirm.
fig, ax = plt.subplots(nrows=1, ncols=2, figsize=(16, 9))
plt.subplot(1, 2, 1)
# BUG FIX: `normed` was deprecated and then removed in matplotlib 3.x;
# `density=True` is the drop-in replacement for a normalized histogram.
plt.hist(data, density=True, bins=30)
plt.subplot(1, 2, 2)
# plot things...
plt.xlabel('x')
plt.ylabel('y')
plt.title('tile')  # NOTE(review): 'tile' looks like a typo for 'title' -- confirm intent
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import tensorflow as tf

# Apply exponential decay to the learning rate (TF1-style API).
# Assumes `lr`, `decay_steps`, and `decay_rate` are defined by the
# surrounding script -- TODO confirm.
global_step = tf.Variable(0, trainable=False)
# BUG FIX: was misspelled `stater_learning_rate`, but the call below reads
# `starter_learning_rate`, so the original raised NameError at runtime.
starter_learning_rate = lr  # initial learning rate before decay
learning_rate = tf.train.exponential_decay(starter_learning_rate, global_step,
                                           decay_steps, decay_rate, staircase=True)
optimizer = tf.train.AdamOptimizer(learning_rate)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import tensorflow as tf | |
def get_v_names(): | |
tf.reset_default_graph() | |
with tf.variable_scope('test'): | |
a = tf.Variable(tf.ones([2]),tf.float32, name='a') | |
b = tf.Variable(tf.zeros([3]), dtype=tf.float32, name='b',trainable=False) | |
# GLOBAL_VARIABLES: get all variable, e.g. a & b | |
# GLOBAL_TRAINABLE: get trainable variable, e.g. a | |
for i in tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES):#TRAINABLE |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
from __future__ import absolute_import | |
from __future__ import division | |
from __future__ import print_function | |
import math | |
from tensorflow.python.framework import constant_op | |
from tensorflow.python.framework import dtypes | |
from tensorflow.python.ops import array_ops | |
from tensorflow.python.ops import linalg_ops |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import tensorflow as tf | |
#with default settings | |
def scope_defsetting(): | |
with tf.variable_scope('qa', initializer=tf.zeros_initializer()): | |
#tf.get_variable_scope().reuse_variables() | |
# if without setting initializer in get_varaible, use the default one in the scope. | |
a = tf.get_variable('a', [2,4], tf.float32) | |
#tf.get_variable_scope().reuse == True | |
with tf.Session() as sess: |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import tensorflow as tf | |
def tf_attention(): | |
a = tf.constant([[2,3],[4,5]]) | |
b = tf.constant([[[1,1,1],[2,2,2]],[[3,3,3],[4,4,4]]]) | |
## | |
#what we want is: [[[2,2,2],[6,6,6]],[[12,12,12],[20,20,20]]] | |
#then sum it over dimension-1 to get: [[ 8 8 8], [32 32 32]] | |
## | |
d = tf.expand_dims(a, 2) |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import tensorflow as tf

# Two ways to attach a graph to a TensorBoard summary writer.
# Assumes `sess` is an already-launched tf.Session -- TODO confirm.

# Option 1: pass the graph when constructing the writer.
# NOTE: the session must exist before this line runs.
writer = tf.summary.FileWriter('path/to/file/name', sess.graph)

# Option 2: construct the writer first, then attach the graph later,
# somewhere after the session has been launched.
writer = tf.summary.FileWriter('path/to/file/name')
writer.add_graph(sess.graph)