commit 8ff41bacfb0be6429c3b5a8559d4af6e2547165a
Author: latyas
Date:   Mon Jan 21 19:18:34 2019 +0800

    init project

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..e69de29
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..db701f2
--- /dev/null
+++ b/README.md
@@ -0,0 +1,11 @@
+Tensorflow Workspace for K210
+======
+
+## Classifier for ImageNet
+1. Download the ImageNet dataset and extract the training data into the folder ILSVRC2012\_img\_train as described in its instructions. It contains 1000 subfolders, each named after its class ID (e.g. n02484975) and holding the training images for that class.
+2. MobileNet v1 model definition: mobilenetv1/models/mobilenet\_v1.py. Note that because the K210 does not support TensorFlow's SAME padding, each stride=2 layer first pads the input with a fixed ring of zeros and then applies the stride=2 convolution with padding=VALID.
+3. Training script: mobilenetv1/run\_mobilenet\_v1.sh. Adjust the parameters in it as needed, then run it.
+4. freeze\_graph.py converts a training checkpoint (ckpt) into a pb file. The command format is:
+   python mobilenetv1/freeze\_graph.py model.mobilenet\_v1 ckpt\_fold pb\_file
+5. To evaluate performance on the ImageNet validation set, download the validation set, extract it into per-class folders (same layout as the training set), and run: python mobilenetv1/validation\_imagenet.py pb\_file(or ckpt folder) val\_set\_fold
+6. To predict a single image: python mobilenetv1/predict\_one\_pic.py pb\_file(or ckpt folder) pic
diff --git a/mobilenetv1/__init__.py b/mobilenetv1/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/mobilenetv1/base_func.py b/mobilenetv1/base_func.py
new file mode 100644
index 0000000..8092311
--- /dev/null
+++ b/mobilenetv1/base_func.py
@@ -0,0 +1,387 @@
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import os
+from subprocess import Popen, PIPE
+import tensorflow as tf
+import numpy as np
+from scipy import misc
+from scipy import interpolate
+from tensorflow.python.training import training
+import random
+import re
+from tensorflow.python.platform import gfile
+import math
+import time
+from six import iteritems
+
+
+
+def get_image_paths_and_labels(dataset):
+    image_paths_flat = []
+    labels_flat = []
+    for i in range(len(dataset)):
+        image_paths_flat += dataset[i].image_paths
+        labels_flat += [i] * len(dataset[i].image_paths)
+    return image_paths_flat, labels_flat
+
+def shuffle_examples(image_paths, labels):
+    shuffle_list = list(zip(image_paths, labels))
+    random.shuffle(shuffle_list)
+    image_paths_shuff, labels_shuff = zip(*shuffle_list)
+    return image_paths_shuff, labels_shuff
+
+def random_rotate_image(image):
+    angle = np.random.uniform(low=-10.0, high=10.0)
+    return misc.imrotate(image, angle, 'bicubic')
+
+# 1: Random rotate 2: Random crop 4: Random flip 8: Fixed image standardization 16: Flip
+RANDOM_ROTATE = 1
+RANDOM_CROP = 2
+RANDOM_FLIP = 4
+FIXED_STANDARDIZATION = 8
+FLIP = 16
+def create_input_pipeline(input_queue, image_size, nrof_preprocess_threads, batch_size_placeholder):
+    t=time.time()
+    images_and_labels_list = []
+    for _ in range(nrof_preprocess_threads):
+        filenames, label, control = input_queue.dequeue()
+        images = []
+        for filename in tf.unstack(filenames):
+            file_contents = tf.read_file(filename)
+            image = tf.image.decode_image(file_contents, 3)
+            # image = tf.image.resize_images(image, [image_size[0], image_size[1]],method=tf.image.ResizeMethod.BILINEAR)
+            image = tf.cond(get_control_flag(control[0], RANDOM_ROTATE),
+                            lambda:tf.py_func(random_rotate_image, [image], tf.uint8),
+                            lambda:tf.identity(image))
+            image = tf.cond(get_control_flag(control[0], RANDOM_CROP),
+                            lambda:tf.random_crop(image, image_size + (3,)),
+                            lambda:tf.image.resize_image_with_crop_or_pad(image, image_size[0], image_size[1]))
+            image = tf.cond(get_control_flag(control[0], RANDOM_FLIP),
+                            lambda:tf.image.random_flip_left_right(image),
+                            lambda:tf.identity(image))
+
image = tf.cond(get_control_flag(control[0], FIXED_STANDARDIZATION), + lambda:(tf.cast(image, tf.float32))/255.0, + lambda:tf.image.per_image_standardization(image)) + image = tf.cond(get_control_flag(control[0], FLIP), + lambda:tf.image.flip_left_right(image), + lambda:tf.identity(image)) + #pylint: disable=no-member + image.set_shape(image_size + (3,)) + images.append(image) + images_and_labels_list.append([images, label]) + + image_batch, label_batch = tf.train.batch_join( + images_and_labels_list, batch_size=batch_size_placeholder, + shapes=[image_size + (3,), ()], enqueue_many=True, + capacity=4 * nrof_preprocess_threads * 100, + allow_smaller_final_batch=True) + tt = time.time()-t + print('pre_process time %f' % tt) + print('LLLLLLLLLLLLLLLLL') + + + return image_batch, label_batch + +def get_control_flag(control, field): + return tf.equal(tf.mod(tf.floor_div(control, field), 2), 1) + +def _add_loss_summaries(total_loss): + """Add summaries for losses. + + Generates moving average for all losses and associated summaries for + visualizing the performance of the network. + + Args: + total_loss: Total loss from loss(). + Returns: + loss_averages_op: op for generating moving averages of losses. + """ + # Compute the moving average of all individual losses and the total loss. + loss_averages = tf.train.ExponentialMovingAverage(0.9, name='avg') + losses = tf.get_collection('losses') + loss_averages_op = loss_averages.apply(losses + [total_loss]) + + # Attach a scalar summmary to all individual losses and the total loss; do the + # same for the averaged version of the losses. + for l in losses + [total_loss]: + # Name each loss as '(raw)' and name the moving average version of the loss + # as the original loss name. + tf.summary.scalar(l.op.name +' (raw)', l) + tf.summary.scalar(l.op.name, loss_averages.average(l)) + + return loss_averages_op + +def train(total_loss, global_step, optimizer, learning_rate, moving_average_decay, update_gradient_vars, log_histograms=True): + # Generate moving averages of all losses and associated summaries. + loss_averages_op = _add_loss_summaries(total_loss) + + # Compute gradients. + with tf.control_dependencies([loss_averages_op]): + if optimizer=='ADAGRAD': + opt = tf.train.AdagradOptimizer(learning_rate) + elif optimizer=='ADADELTA': + opt = tf.train.AdadeltaOptimizer(learning_rate, rho=0.9, epsilon=1e-6) + elif optimizer=='ADAM': + opt = tf.train.AdamOptimizer(learning_rate, beta1=0.9, beta2=0.999, epsilon=0.1) + elif optimizer=='RMSPROP': + opt = tf.train.RMSPropOptimizer(learning_rate, decay=0.9, momentum=0.9, epsilon=1.0) + elif optimizer=='MOM': + opt = tf.train.MomentumOptimizer(learning_rate, 0.9, use_nesterov=True) + else: + raise ValueError('Invalid optimization algorithm') + + grads = opt.compute_gradients(total_loss, update_gradient_vars) + + # Apply gradients. + apply_gradient_op = opt.apply_gradients(grads, global_step=global_step) + + # Add histograms for trainable variables. + if log_histograms: + for var in tf.trainable_variables(): + tf.summary.histogram(var.op.name, var) + + # Add histograms for gradients. + if log_histograms: + for grad, var in grads: + if grad is not None: + tf.summary.histogram(var.op.name + '/gradients', grad) + + # Track the moving averages of all trainable variables. 
+    variable_averages = tf.train.ExponentialMovingAverage(
+        moving_average_decay, global_step)
+    variables_averages_op = variable_averages.apply(tf.trainable_variables())
+
+    with tf.control_dependencies([apply_gradient_op, variables_averages_op]):
+        train_op = tf.no_op(name='train')
+
+    return train_op
+
+def prewhiten(x):
+    mean = np.mean(x)
+    std = np.std(x)
+    std_adj = np.maximum(std, 1.0/np.sqrt(x.size))
+    y = np.multiply(np.subtract(x, mean), 1/std_adj)
+    return y
+
+def prewhiten_fix(x):
+    y = x/255.
+    return y
+
+def crop(image, random_crop, image_size):
+    image_crop = np.zeros((image_size, image_size, 3))
+    m_min = image.shape[0] if image.shape[0]<image.shape[1] else image.shape[1]
+    m_max = image.shape[0] if image.shape[0]>image.shape[1] else image.shape[1]
+
+    if m_max < image_size:
+        v_0 = (image_size-m_max)//2
+        image_crop[v_0:v_0+image_size,v_0:v_0+image_size,:] = image
+    elif m_min < image_size:
+        if image.shape[0]=min_nrof_images_per_class and nrof_images_in_class-split>=1:
+            train_set.append(ImageClass(cls.name, paths[:split]))
+            test_set.append(ImageClass(cls.name, paths[split:]))
+    else:
+        raise ValueError('Invalid train/test split mode "%s"' % mode)
+    return train_set, test_set
+
+def load_model(model, input_map=None):
+    # Check if the model is a model directory (containing a metagraph and a checkpoint file)
+    # or if it is a protobuf file with a frozen graph
+    model_exp = os.path.expanduser(model)
+    if (os.path.isfile(model_exp)):
+        print('Model filename: %s' % model_exp)
+        with gfile.FastGFile(model_exp,'rb') as f:
+            graph_def = tf.GraphDef()
+            graph_def.ParseFromString(f.read())
+            tf.import_graph_def(graph_def, input_map=input_map, name='')
+    else:
+        print('Model directory: %s' % model_exp)
+        meta_file, ckpt_file = get_model_filenames(model_exp)
+
+        print('Metagraph file: %s' % meta_file)
+        print('Checkpoint file: %s' % ckpt_file)
+
+        saver = tf.train.import_meta_graph(os.path.join(model_exp, meta_file), input_map=input_map)
+        saver.restore(tf.get_default_session(), os.path.join(model_exp, ckpt_file))
+
+def get_model_filenames(model_dir):
+    files = os.listdir(model_dir)
+    meta_files = [s for s in files if s.endswith('.meta')]
+    if len(meta_files)==0:
+        raise ValueError('No meta file found in the model directory (%s)' % model_dir)
+    elif len(meta_files)>1:
+        raise ValueError('There should not be more than one meta file in the model directory (%s)' % model_dir)
+    meta_file = meta_files[0]
+    ckpt = tf.train.get_checkpoint_state(model_dir)
+    if ckpt and ckpt.model_checkpoint_path:
+        ckpt_file = os.path.basename(ckpt.model_checkpoint_path)
+        return meta_file, ckpt_file
+
+    meta_files = [s for s in files if '.ckpt' in s]
+    max_step = -1
+    for f in files:
+        step_str = re.match(r'(^model-[\w\- ]+.ckpt-(\d+))', f)
+        if step_str is not None and len(step_str.groups())>=2:
+            step = int(step_str.groups()[1])
+            if step > max_step:
+                max_step = step
+                ckpt_file = step_str.groups()[0]
+    return meta_file, ckpt_file
+
+def list_variables(filename):
+    reader = training.NewCheckpointReader(filename)
+    variable_map = reader.get_variable_to_shape_map()
+    names = sorted(variable_map.keys())
+    return names
+
+def write_arguments_to_file(args, filename):
+    with open(filename, 'w') as f:
+        for key, value in iteritems(vars(args)):
+            f.write('%s: %s\n' % (key, str(value)))
diff --git a/mobilenetv1/data/eagle.jpg b/mobilenetv1/data/eagle.jpg
new file mode 100644
index 0000000..8b75095
Binary files /dev/null and b/mobilenetv1/data/eagle.jpg differ
diff --git a/mobilenetv1/data/label.txt b/mobilenetv1/data/label.txt
new file mode 100644
index 0000000..88aa58f
--- /dev/null
+++ b/mobilenetv1/data/label.txt @@ -0,0 +1,1000 @@ +n01440764 +n01443537 +n01484850 +n01491361 +n01494475 +n01496331 +n01498041 +n01514668 +n01514859 +n01518878 +n01530575 +n01531178 +n01532829 +n01534433 +n01537544 +n01558993 +n01560419 +n01580077 +n01582220 +n01592084 +n01601694 +n01608432 +n01614925 +n01616318 +n01622779 +n01629819 +n01630670 +n01631663 +n01632458 +n01632777 +n01641577 +n01644373 +n01644900 +n01664065 +n01665541 +n01667114 +n01667778 +n01669191 +n01675722 +n01677366 +n01682714 +n01685808 +n01687978 +n01688243 +n01689811 +n01692333 +n01693334 +n01694178 +n01695060 +n01697457 +n01698640 +n01704323 +n01728572 +n01728920 +n01729322 +n01729977 +n01734418 +n01735189 +n01737021 +n01739381 +n01740131 +n01742172 +n01744401 +n01748264 +n01749939 +n01751748 +n01753488 +n01755581 +n01756291 +n01768244 +n01770081 +n01770393 +n01773157 +n01773549 +n01773797 +n01774384 +n01774750 +n01775062 +n01776313 +n01784675 +n01795545 +n01796340 +n01797886 +n01798484 +n01806143 +n01806567 +n01807496 +n01817953 +n01818515 +n01819313 +n01820546 +n01824575 +n01828970 +n01829413 +n01833805 +n01843065 +n01843383 +n01847000 +n01855032 +n01855672 +n01860187 +n01871265 +n01872401 +n01873310 +n01877812 +n01882714 +n01883070 +n01910747 +n01914609 +n01917289 +n01924916 +n01930112 +n01943899 +n01944390 +n01945685 +n01950731 +n01955084 +n01968897 +n01978287 +n01978455 +n01980166 +n01981276 +n01983481 +n01984695 +n01985128 +n01986214 +n01990800 +n02002556 +n02002724 +n02006656 +n02007558 +n02009229 +n02009912 +n02011460 +n02012849 +n02013706 +n02017213 +n02018207 +n02018795 +n02025239 +n02027492 +n02028035 +n02033041 +n02037110 +n02051845 +n02056570 +n02058221 +n02066245 +n02071294 +n02074367 +n02077923 +n02085620 +n02085782 +n02085936 +n02086079 +n02086240 +n02086646 +n02086910 +n02087046 +n02087394 +n02088094 +n02088238 +n02088364 +n02088466 +n02088632 +n02089078 +n02089867 +n02089973 +n02090379 +n02090622 +n02090721 +n02091032 +n02091134 +n02091244 +n02091467 +n02091635 +n02091831 +n02092002 +n02092339 +n02093256 +n02093428 +n02093647 +n02093754 +n02093859 +n02093991 +n02094114 +n02094258 +n02094433 +n02095314 +n02095570 +n02095889 +n02096051 +n02096177 +n02096294 +n02096437 +n02096585 +n02097047 +n02097130 +n02097209 +n02097298 +n02097474 +n02097658 +n02098105 +n02098286 +n02098413 +n02099267 +n02099429 +n02099601 +n02099712 +n02099849 +n02100236 +n02100583 +n02100735 +n02100877 +n02101006 +n02101388 +n02101556 +n02102040 +n02102177 +n02102318 +n02102480 +n02102973 +n02104029 +n02104365 +n02105056 +n02105162 +n02105251 +n02105412 +n02105505 +n02105641 +n02105855 +n02106030 +n02106166 +n02106382 +n02106550 +n02106662 +n02107142 +n02107312 +n02107574 +n02107683 +n02107908 +n02108000 +n02108089 +n02108422 +n02108551 +n02108915 +n02109047 +n02109525 +n02109961 +n02110063 +n02110185 +n02110341 +n02110627 +n02110806 +n02110958 +n02111129 +n02111277 +n02111500 +n02111889 +n02112018 +n02112137 +n02112350 +n02112706 +n02113023 +n02113186 +n02113624 +n02113712 +n02113799 +n02113978 +n02114367 +n02114548 +n02114712 +n02114855 +n02115641 +n02115913 +n02116738 +n02117135 +n02119022 +n02119789 +n02120079 +n02120505 +n02123045 +n02123159 +n02123394 +n02123597 +n02124075 +n02125311 +n02127052 +n02128385 +n02128757 +n02128925 +n02129165 +n02129604 +n02130308 +n02132136 +n02133161 +n02134084 +n02134418 +n02137549 +n02138441 +n02165105 +n02165456 +n02167151 +n02168699 +n02169497 +n02172182 +n02174001 +n02177972 +n02190166 +n02206856 +n02219486 +n02226429 +n02229544 +n02231487 +n02233338 +n02236044 +n02256656 +n02259212 
+n02264363 +n02268443 +n02268853 +n02276258 +n02277742 +n02279972 +n02280649 +n02281406 +n02281787 +n02317335 +n02319095 +n02321529 +n02325366 +n02326432 +n02328150 +n02342885 +n02346627 +n02356798 +n02361337 +n02363005 +n02364673 +n02389026 +n02391049 +n02395406 +n02396427 +n02397096 +n02398521 +n02403003 +n02408429 +n02410509 +n02412080 +n02415577 +n02417914 +n02422106 +n02422699 +n02423022 +n02437312 +n02437616 +n02441942 +n02442845 +n02443114 +n02443484 +n02444819 +n02445715 +n02447366 +n02454379 +n02457408 +n02480495 +n02480855 +n02481823 +n02483362 +n02483708 +n02484975 +n02486261 +n02486410 +n02487347 +n02488291 +n02488702 +n02489166 +n02490219 +n02492035 +n02492660 +n02493509 +n02493793 +n02494079 +n02497673 +n02500267 +n02504013 +n02504458 +n02509815 +n02510455 +n02514041 +n02526121 +n02536864 +n02606052 +n02607072 +n02640242 +n02641379 +n02643566 +n02655020 +n02666196 +n02667093 +n02669723 +n02672831 +n02676566 +n02687172 +n02690373 +n02692877 +n02699494 +n02701002 +n02704792 +n02708093 +n02727426 +n02730930 +n02747177 +n02749479 +n02769748 +n02776631 +n02777292 +n02782093 +n02783161 +n02786058 +n02787622 +n02788148 +n02790996 +n02791124 +n02791270 +n02793495 +n02794156 +n02795169 +n02797295 +n02799071 +n02802426 +n02804414 +n02804610 +n02807133 +n02808304 +n02808440 +n02814533 +n02814860 +n02815834 +n02817516 +n02823428 +n02823750 +n02825657 +n02834397 +n02835271 +n02837789 +n02840245 +n02841315 +n02843684 +n02859443 +n02860847 +n02865351 +n02869837 +n02870880 +n02871525 +n02877765 +n02879718 +n02883205 +n02892201 +n02892767 +n02894605 +n02895154 +n02906734 +n02909870 +n02910353 +n02916936 +n02917067 +n02927161 +n02930766 +n02939185 +n02948072 +n02950826 +n02951358 +n02951585 +n02963159 +n02965783 +n02966193 +n02966687 +n02971356 +n02974003 +n02977058 +n02978881 +n02979186 +n02980441 +n02981792 +n02988304 +n02992211 +n02992529 +n02999410 +n03000134 +n03000247 +n03000684 +n03014705 +n03016953 +n03017168 +n03018349 +n03026506 +n03028079 +n03032252 +n03041632 +n03042490 +n03045698 +n03047690 +n03062245 +n03063599 +n03063689 +n03065424 +n03075370 +n03085013 +n03089624 +n03095699 +n03100240 +n03109150 +n03110669 +n03124043 +n03124170 +n03125729 +n03126707 +n03127747 +n03127925 +n03131574 +n03133878 +n03134739 +n03141823 +n03146219 +n03160309 +n03179701 +n03180011 +n03187595 +n03188531 +n03196217 +n03197337 +n03201208 +n03207743 +n03207941 +n03208938 +n03216828 +n03218198 +n03220513 +n03223299 +n03240683 +n03249569 +n03250847 +n03255030 +n03259280 +n03271574 +n03272010 +n03272562 +n03290653 +n03291819 +n03297495 +n03314780 +n03325584 +n03337140 +n03344393 +n03345487 +n03347037 +n03355925 +n03372029 +n03376595 +n03379051 +n03384352 +n03388043 +n03388183 +n03388549 +n03393912 +n03394916 +n03400231 +n03404251 +n03417042 +n03424325 +n03425413 +n03443371 +n03444034 +n03445777 +n03445924 +n03447447 +n03447721 +n03450230 +n03452741 +n03457902 +n03459775 +n03461385 +n03467068 +n03476684 +n03476991 +n03478589 +n03481172 +n03482405 +n03483316 +n03485407 +n03485794 +n03492542 +n03494278 +n03495258 +n03496892 +n03498962 +n03527444 +n03529860 +n03530642 +n03532672 +n03534580 +n03535780 +n03538406 +n03544143 +n03584254 +n03584829 +n03590841 +n03594734 +n03594945 +n03595614 +n03598930 +n03599486 +n03602883 +n03617480 +n03623198 +n03627232 +n03630383 +n03633091 +n03637318 +n03642806 +n03649909 +n03657121 +n03658185 +n03661043 +n03662601 +n03666591 +n03670208 +n03673027 +n03676483 +n03680355 +n03690938 +n03691459 +n03692522 +n03697007 +n03706229 +n03709823 +n03710193 +n03710637 +n03710721 +n03717622 
+n03720891 +n03721384 +n03724870 +n03729826 +n03733131 +n03733281 +n03733805 +n03742115 +n03743016 +n03759954 +n03761084 +n03763968 +n03764736 +n03769881 +n03770439 +n03770679 +n03773504 +n03775071 +n03775546 +n03776460 +n03777568 +n03777754 +n03781244 +n03782006 +n03785016 +n03786901 +n03787032 +n03788195 +n03788365 +n03791053 +n03792782 +n03792972 +n03793489 +n03794056 +n03796401 +n03803284 +n03804744 +n03814639 +n03814906 +n03825788 +n03832673 +n03837869 +n03838899 +n03840681 +n03841143 +n03843555 +n03854065 +n03857828 +n03866082 +n03868242 +n03868863 +n03871628 +n03873416 +n03874293 +n03874599 +n03876231 +n03877472 +n03877845 +n03884397 +n03887697 +n03888257 +n03888605 +n03891251 +n03891332 +n03895866 +n03899768 +n03902125 +n03903868 +n03908618 +n03908714 +n03916031 +n03920288 +n03924679 +n03929660 +n03929855 +n03930313 +n03930630 +n03933933 +n03935335 +n03937543 +n03938244 +n03942813 +n03944341 +n03947888 +n03950228 +n03954731 +n03956157 +n03958227 +n03961711 +n03967562 +n03970156 +n03976467 +n03976657 +n03977966 +n03980874 +n03982430 +n03983396 +n03991062 +n03992509 +n03995372 +n03998194 +n04004767 +n04005630 +n04008634 +n04009552 +n04019541 +n04023962 +n04026417 +n04033901 +n04033995 +n04037443 +n04039381 +n04040759 +n04041544 +n04044716 +n04049303 +n04065272 +n04067472 +n04069434 +n04070727 +n04074963 +n04081281 +n04086273 +n04090263 +n04099969 +n04111531 +n04116512 +n04118538 +n04118776 +n04120489 +n04125021 +n04127249 +n04131690 +n04133789 +n04136333 +n04141076 +n04141327 +n04141975 +n04146614 +n04147183 +n04149813 +n04152593 +n04153751 +n04154565 +n04162706 +n04179913 +n04192698 +n04200800 +n04201297 +n04204238 +n04204347 +n04208210 +n04209133 +n04209239 +n04228054 +n04229816 +n04235860 +n04238763 +n04239074 +n04243546 +n04251144 +n04252077 +n04252225 +n04254120 +n04254680 +n04254777 +n04258138 +n04259630 +n04263257 +n04264628 +n04265275 +n04266014 +n04270147 +n04273569 +n04275548 +n04277352 +n04285008 +n04286575 +n04296562 +n04310018 +n04311004 +n04311174 +n04317175 +n04325704 +n04326547 +n04328186 +n04330267 +n04332243 +n04335435 +n04336792 +n04344873 +n04346328 +n04347754 +n04350905 +n04355338 +n04355933 +n04356056 +n04357314 +n04366367 +n04367480 +n04370456 +n04371430 +n04371774 +n04372370 +n04376876 +n04380533 +n04389033 +n04392985 +n04398044 +n04399382 +n04404412 +n04409515 +n04417672 +n04418357 +n04423845 +n04428191 +n04429376 +n04435653 +n04442312 +n04443257 +n04447861 +n04456115 +n04458633 +n04461696 +n04462240 +n04465501 +n04467665 +n04476259 +n04479046 +n04482393 +n04483307 +n04485082 +n04486054 +n04487081 +n04487394 +n04493381 +n04501370 +n04505470 +n04507155 +n04509417 +n04515003 +n04517823 +n04522168 +n04523525 +n04525038 +n04525305 +n04532106 +n04532670 +n04536866 +n04540053 +n04542943 +n04548280 +n04548362 +n04550184 +n04552348 +n04553703 +n04554684 +n04557648 +n04560804 +n04562935 +n04579145 +n04579432 +n04584207 +n04589890 +n04590129 +n04591157 +n04591713 +n04592741 +n04596742 +n04597913 +n04599235 +n04604644 +n04606251 +n04612504 +n04613696 +n06359193 +n06596364 +n06785654 +n06794110 +n06874185 +n07248320 +n07565083 +n07579787 +n07583066 +n07584110 +n07590611 +n07613480 +n07614500 +n07615774 +n07684084 +n07693725 +n07695742 +n07697313 +n07697537 +n07711569 +n07714571 +n07714990 +n07715103 +n07716358 +n07716906 +n07717410 +n07717556 +n07718472 +n07718747 +n07720875 +n07730033 +n07734744 +n07742313 +n07745940 +n07747607 +n07749582 +n07753113 +n07753275 +n07753592 +n07754684 +n07760859 +n07768694 +n07802026 +n07831146 +n07836838 +n07860988 +n07871810 +n07873807 
+n07875152 +n07880968 +n07892512 +n07920052 +n07930864 +n07932039 +n09193705 +n09229709 +n09246464 +n09256479 +n09288635 +n09332890 +n09399592 +n09421951 +n09428293 +n09468604 +n09472597 +n09835506 +n10148035 +n10565667 +n11879895 +n11939491 +n12057211 +n12144580 +n12267677 +n12620546 +n12768682 +n12985857 +n12998815 +n13037406 +n13040303 +n13044778 +n13052670 +n13054560 +n13133613 +n15075141 diff --git a/mobilenetv1/data/learning_rate.txt b/mobilenetv1/data/learning_rate.txt new file mode 100644 index 0000000..c9586ec --- /dev/null +++ b/mobilenetv1/data/learning_rate.txt @@ -0,0 +1,5 @@ +# Learning rate schedule +# Maps an epoch number to a learning rate +0: 0.01 +11: 0.001 +50: 0.0001 \ No newline at end of file diff --git a/mobilenetv1/data/names.list b/mobilenetv1/data/names.list new file mode 100644 index 0000000..a509c00 --- /dev/null +++ b/mobilenetv1/data/names.list @@ -0,0 +1,1000 @@ +tench, Tinca tinca +goldfish, Carassius auratus +great white shark, white shark, man-eater, man-eating shark, Carcharodon carcharias +tiger shark, Galeocerdo cuvieri +hammerhead, hammerhead shark +electric ray, crampfish, numbfish, torpedo +stingray +cock +hen +ostrich, Struthio camelus +brambling, Fringilla montifringilla +goldfinch, Carduelis carduelis +house finch, linnet, Carpodacus mexicanus +junco, snowbird +indigo bunting, indigo finch, indigo bird, Passerina cyanea +robin, American robin, Turdus migratorius +bulbul +jay +magpie +chickadee +water ouzel, dipper +kite +bald eagle, American eagle, Haliaeetus leucocephalus +vulture +great grey owl, great gray owl, Strix nebulosa +European fire salamander, Salamandra salamandra +common newt, Triturus vulgaris +eft +spotted salamander, Ambystoma maculatum +axolotl, mud puppy, Ambystoma mexicanum +bullfrog, Rana catesbeiana +tree frog, tree-frog +tailed frog, bell toad, ribbed toad, tailed toad, Ascaphus trui +loggerhead, loggerhead turtle, Caretta caretta +leatherback turtle, leatherback, leathery turtle, Dermochelys coriacea +mud turtle +terrapin +box turtle, box tortoise +banded gecko +common iguana, iguana, Iguana iguana +American chameleon, anole, Anolis carolinensis +whiptail, whiptail lizard +agama +frilled lizard, Chlamydosaurus kingi +alligator lizard +Gila monster, Heloderma suspectum +green lizard, Lacerta viridis +African chameleon, Chamaeleo chamaeleon +Komodo dragon, Komodo lizard, dragon lizard, giant lizard, Varanus komodoensis +African crocodile, Nile crocodile, Crocodylus niloticus +American alligator, Alligator mississipiensis +triceratops +thunder snake, worm snake, Carphophis amoenus +ringneck snake, ring-necked snake, ring snake +hognose snake, puff adder, sand viper +green snake, grass snake +king snake, kingsnake +garter snake, grass snake +water snake +vine snake +night snake, Hypsiglena torquata +boa constrictor, Constrictor constrictor +rock python, rock snake, Python sebae +Indian cobra, Naja naja +green mamba +sea snake +horned viper, cerastes, sand viper, horned asp, Cerastes cornutus +diamondback, diamondback rattlesnake, Crotalus adamanteus +sidewinder, horned rattlesnake, Crotalus cerastes +trilobite +harvestman, daddy longlegs, Phalangium opilio +scorpion +black and gold garden spider, Argiope aurantia +barn spider, Araneus cavaticus +garden spider, Aranea diademata +black widow, Latrodectus mactans +tarantula +wolf spider, hunting spider +tick +centipede +black grouse +ptarmigan +ruffed grouse, partridge, Bonasa umbellus +prairie chicken, prairie grouse, prairie fowl +peacock +quail +partridge +African grey, 
African gray, Psittacus erithacus +macaw +sulphur-crested cockatoo, Kakatoe galerita, Cacatua galerita +lorikeet +coucal +bee eater +hornbill +hummingbird +jacamar +toucan +drake +red-breasted merganser, Mergus serrator +goose +black swan, Cygnus atratus +tusker +echidna, spiny anteater, anteater +platypus, duckbill, duckbilled platypus, duck-billed platypus, Ornithorhynchus anatinus +wallaby, brush kangaroo +koala, koala bear, kangaroo bear, native bear, Phascolarctos cinereus +wombat +jellyfish +sea anemone, anemone +brain coral +flatworm, platyhelminth +nematode, nematode worm, roundworm +conch +snail +slug +sea slug, nudibranch +chiton, coat-of-mail shell, sea cradle, polyplacophore +chambered nautilus, pearly nautilus, nautilus +Dungeness crab, Cancer magister +rock crab, Cancer irroratus +fiddler crab +king crab, Alaska crab, Alaskan king crab, Alaska king crab, Paralithodes camtschatica +American lobster, Northern lobster, Maine lobster, Homarus americanus +spiny lobster, langouste, rock lobster, crawfish, crayfish, sea crawfish +crayfish, crawfish, crawdad, crawdaddy +hermit crab +isopod +white stork, Ciconia ciconia +black stork, Ciconia nigra +spoonbill +flamingo +little blue heron, Egretta caerulea +American egret, great white heron, Egretta albus +bittern +crane +limpkin, Aramus pictus +European gallinule, Porphyrio porphyrio +American coot, marsh hen, mud hen, water hen, Fulica americana +bustard +ruddy turnstone, Arenaria interpres +red-backed sandpiper, dunlin, Erolia alpina +redshank, Tringa totanus +dowitcher +oystercatcher, oyster catcher +pelican +king penguin, Aptenodytes patagonica +albatross, mollymawk +grey whale, gray whale, devilfish, Eschrichtius gibbosus, Eschrichtius robustus +killer whale, killer, orca, grampus, sea wolf, Orcinus orca +dugong, Dugong dugon +sea lion +Chihuahua +Japanese spaniel +Maltese dog, Maltese terrier, Maltese +Pekinese, Pekingese, Peke +Shih-Tzu +Blenheim spaniel +papillon +toy terrier +Rhodesian ridgeback +Afghan hound, Afghan +basset, basset hound +beagle +bloodhound, sleuthhound +bluetick +black-and-tan coonhound +Walker hound, Walker foxhound +English foxhound +redbone +borzoi, Russian wolfhound +Irish wolfhound +Italian greyhound +whippet +Ibizan hound, Ibizan Podenco +Norwegian elkhound, elkhound +otterhound, otter hound +Saluki, gazelle hound +Scottish deerhound, deerhound +Weimaraner +Staffordshire bullterrier, Staffordshire bull terrier +American Staffordshire terrier, Staffordshire terrier, American pit bull terrier, pit bull terrier +Bedlington terrier +Border terrier +Kerry blue terrier +Irish terrier +Norfolk terrier +Norwich terrier +Yorkshire terrier +wire-haired fox terrier +Lakeland terrier +Sealyham terrier, Sealyham +Airedale, Airedale terrier +cairn, cairn terrier +Australian terrier +Dandie Dinmont, Dandie Dinmont terrier +Boston bull, Boston terrier +miniature schnauzer +giant schnauzer +standard schnauzer +Scotch terrier, Scottish terrier, Scottie +Tibetan terrier, chrysanthemum dog +silky terrier, Sydney silky +soft-coated wheaten terrier +West Highland white terrier +Lhasa, Lhasa apso +flat-coated retriever +curly-coated retriever +golden retriever +Labrador retriever +Chesapeake Bay retriever +German short-haired pointer +vizsla, Hungarian pointer +English setter +Irish setter, red setter +Gordon setter +Brittany spaniel +clumber, clumber spaniel +English springer, English springer spaniel +Welsh springer spaniel +cocker spaniel, English cocker spaniel, cocker +Sussex spaniel +Irish water spaniel +kuvasz 
+schipperke +groenendael +malinois +briard +kelpie +komondor +Old English sheepdog, bobtail +Shetland sheepdog, Shetland sheep dog, Shetland +collie +Border collie +Bouvier des Flandres, Bouviers des Flandres +Rottweiler +German shepherd, German shepherd dog, German police dog, alsatian +Doberman, Doberman pinscher +miniature pinscher +Greater Swiss Mountain dog +Bernese mountain dog +Appenzeller +EntleBucher +boxer +bull mastiff +Tibetan mastiff +French bulldog +Great Dane +Saint Bernard, St Bernard +Eskimo dog, husky +malamute, malemute, Alaskan malamute +Siberian husky +dalmatian, coach dog, carriage dog +affenpinscher, monkey pinscher, monkey dog +basenji +pug, pug-dog +Leonberg +Newfoundland, Newfoundland dog +Great Pyrenees +Samoyed, Samoyede +Pomeranian +chow, chow chow +keeshond +Brabancon griffon +Pembroke, Pembroke Welsh corgi +Cardigan, Cardigan Welsh corgi +toy poodle +miniature poodle +standard poodle +Mexican hairless +timber wolf, grey wolf, gray wolf, Canis lupus +white wolf, Arctic wolf, Canis lupus tundrarum +red wolf, maned wolf, Canis rufus, Canis niger +coyote, prairie wolf, brush wolf, Canis latrans +dingo, warrigal, warragal, Canis dingo +dhole, Cuon alpinus +African hunting dog, hyena dog, Cape hunting dog, Lycaon pictus +hyena, hyaena +red fox, Vulpes vulpes +kit fox, Vulpes macrotis +Arctic fox, white fox, Alopex lagopus +grey fox, gray fox, Urocyon cinereoargenteus +tabby, tabby cat +tiger cat +Persian cat +Siamese cat, Siamese +Egyptian cat +cougar, puma, catamount, mountain lion, painter, panther, Felis concolor +lynx, catamount +leopard, Panthera pardus +snow leopard, ounce, Panthera uncia +jaguar, panther, Panthera onca, Felis onca +lion, king of beasts, Panthera leo +tiger, Panthera tigris +cheetah, chetah, Acinonyx jubatus +brown bear, bruin, Ursus arctos +American black bear, black bear, Ursus americanus, Euarctos americanus +ice bear, polar bear, Ursus Maritimus, Thalarctos maritimus +sloth bear, Melursus ursinus, Ursus ursinus +mongoose +meerkat, mierkat +tiger beetle +ladybug, ladybeetle, lady beetle, ladybird, ladybird beetle +ground beetle, carabid beetle +long-horned beetle, longicorn, longicorn beetle +leaf beetle, chrysomelid +dung beetle +rhinoceros beetle +weevil +fly +bee +ant, emmet, pismire +grasshopper, hopper +cricket +walking stick, walkingstick, stick insect +cockroach, roach +mantis, mantid +cicada, cicala +leafhopper +lacewing, lacewing fly +dragonfly, darning needle, devil's darning needle, sewing needle, snake feeder, snake doctor, mosquito hawk, skeeter hawk +damselfly +admiral +ringlet, ringlet butterfly +monarch, monarch butterfly, milkweed butterfly, Danaus plexippus +cabbage butterfly +sulphur butterfly, sulfur butterfly +lycaenid, lycaenid butterfly +starfish, sea star +sea urchin +sea cucumber, holothurian +wood rabbit, cottontail, cottontail rabbit +hare +Angora, Angora rabbit +hamster +porcupine, hedgehog +fox squirrel, eastern fox squirrel, Sciurus niger +marmot +beaver +guinea pig, Cavia cobaya +sorrel +zebra +hog, pig, grunter, squealer, Sus scrofa +wild boar, boar, Sus scrofa +warthog +hippopotamus, hippo, river horse, Hippopotamus amphibius +ox +water buffalo, water ox, Asiatic buffalo, Bubalus bubalis +bison +ram, tup +bighorn, bighorn sheep, cimarron, Rocky Mountain bighorn, Rocky Mountain sheep, Ovis canadensis +ibex, Capra ibex +hartebeest +impala, Aepyceros melampus +gazelle +Arabian camel, dromedary, Camelus dromedarius +llama +weasel +mink +polecat, fitch, foulmart, foumart, Mustela putorius +black-footed ferret, 
ferret, Mustela nigripes +otter +skunk, polecat, wood pussy +badger +armadillo +three-toed sloth, ai, Bradypus tridactylus +orangutan, orang, orangutang, Pongo pygmaeus +gorilla, Gorilla gorilla +chimpanzee, chimp, Pan troglodytes +gibbon, Hylobates lar +siamang, Hylobates syndactylus, Symphalangus syndactylus +guenon, guenon monkey +patas, hussar monkey, Erythrocebus patas +baboon +macaque +langur +colobus, colobus monkey +proboscis monkey, Nasalis larvatus +marmoset +capuchin, ringtail, Cebus capucinus +howler monkey, howler +titi, titi monkey +spider monkey, Ateles geoffroyi +squirrel monkey, Saimiri sciureus +Madagascar cat, ring-tailed lemur, Lemur catta +indri, indris, Indri indri, Indri brevicaudatus +Indian elephant, Elephas maximus +African elephant, Loxodonta africana +lesser panda, red panda, panda, bear cat, cat bear, Ailurus fulgens +giant panda, panda, panda bear, coon bear, Ailuropoda melanoleuca +barracouta, snoek +eel +coho, cohoe, coho salmon, blue jack, silver salmon, Oncorhynchus kisutch +rock beauty, Holocanthus tricolor +anemone fish +sturgeon +gar, garfish, garpike, billfish, Lepisosteus osseus +lionfish +puffer, pufferfish, blowfish, globefish +abacus +abaya +academic gown, academic robe, judge's robe +accordion, piano accordion, squeeze box +acoustic guitar +aircraft carrier, carrier, flattop, attack aircraft carrier +airliner +airship, dirigible +altar +ambulance +amphibian, amphibious vehicle +analog clock +apiary, bee house +apron +ashcan, trash can, garbage can, wastebin, ash bin, ash-bin, ashbin, dustbin, trash barrel, trash bin +assault rifle, assault gun +backpack, back pack, knapsack, packsack, rucksack, haversack +bakery, bakeshop, bakehouse +balance beam, beam +balloon +ballpoint, ballpoint pen, ballpen, Biro +Band Aid +banjo +bannister, banister, balustrade, balusters, handrail +barbell +barber chair +barbershop +barn +barometer +barrel, cask +barrow, garden cart, lawn cart, wheelbarrow +baseball +basketball +bassinet +bassoon +bathing cap, swimming cap +bath towel +bathtub, bathing tub, bath, tub +beach wagon, station wagon, wagon, estate car, beach waggon, station waggon, waggon +beacon, lighthouse, beacon light, pharos +beaker +bearskin, busby, shako +beer bottle +beer glass +bell cote, bell cot +bib +bicycle-built-for-two, tandem bicycle, tandem +bikini, two-piece +binder, ring-binder +binoculars, field glasses, opera glasses +birdhouse +boathouse +bobsled, bobsleigh, bob +bolo tie, bolo, bola tie, bola +bonnet, poke bonnet +bookcase +bookshop, bookstore, bookstall +bottlecap +bow +bow tie, bow-tie, bowtie +brass, memorial tablet, plaque +brassiere, bra, bandeau +breakwater, groin, groyne, mole, bulwark, seawall, jetty +breastplate, aegis, egis +broom +bucket, pail +buckle +bulletproof vest +bullet train, bullet +butcher shop, meat market +cab, hack, taxi, taxicab +caldron, cauldron +candle, taper, wax light +cannon +canoe +can opener, tin opener +cardigan +car mirror +carousel, carrousel, merry-go-round, roundabout, whirligig +carpenter's kit, tool kit +carton +car wheel +cash machine, cash dispenser, automated teller machine, automatic teller machine, automated teller, automatic teller, ATM +cassette +cassette player +castle +catamaran +CD player +cello, violoncello +cellular telephone, cellular phone, cellphone, cell, mobile phone +chain +chainlink fence +chain mail, ring mail, mail, chain armor, chain armour, ring armor, ring armour +chain saw, chainsaw +chest +chiffonier, commode +chime, bell, gong +china cabinet, china closet +Christmas 
stocking +church, church building +cinema, movie theater, movie theatre, movie house, picture palace +cleaver, meat cleaver, chopper +cliff dwelling +cloak +clog, geta, patten, sabot +cocktail shaker +coffee mug +coffeepot +coil, spiral, volute, whorl, helix +combination lock +computer keyboard, keypad +confectionery, confectionary, candy store +container ship, containership, container vessel +convertible +corkscrew, bottle screw +cornet, horn, trumpet, trump +cowboy boot +cowboy hat, ten-gallon hat +cradle +crane +crash helmet +crate +crib, cot +Crock Pot +croquet ball +crutch +cuirass +dam, dike, dyke +desk +desktop computer +dial telephone, dial phone +diaper, nappy, napkin +digital clock +digital watch +dining table, board +dishrag, dishcloth +dishwasher, dish washer, dishwashing machine +disk brake, disc brake +dock, dockage, docking facility +dogsled, dog sled, dog sleigh +dome +doormat, welcome mat +drilling platform, offshore rig +drum, membranophone, tympan +drumstick +dumbbell +Dutch oven +electric fan, blower +electric guitar +electric locomotive +entertainment center +envelope +espresso maker +face powder +feather boa, boa +file, file cabinet, filing cabinet +fireboat +fire engine, fire truck +fire screen, fireguard +flagpole, flagstaff +flute, transverse flute +folding chair +football helmet +forklift +fountain +fountain pen +four-poster +freight car +French horn, horn +frying pan, frypan, skillet +fur coat +garbage truck, dustcart +gasmask, respirator, gas helmet +gas pump, gasoline pump, petrol pump, island dispenser +goblet +go-kart +golf ball +golfcart, golf cart +gondola +gong, tam-tam +gown +grand piano, grand +greenhouse, nursery, glasshouse +grille, radiator grille +grocery store, grocery, food market, market +guillotine +hair slide +hair spray +half track +hammer +hamper +hand blower, blow dryer, blow drier, hair dryer, hair drier +hand-held computer, hand-held microcomputer +handkerchief, hankie, hanky, hankey +hard disc, hard disk, fixed disk +harmonica, mouth organ, harp, mouth harp +harp +harvester, reaper +hatchet +holster +home theater, home theatre +honeycomb +hook, claw +hoopskirt, crinoline +horizontal bar, high bar +horse cart, horse-cart +hourglass +iPod +iron, smoothing iron +jack-o'-lantern +jean, blue jean, denim +jeep, landrover +jersey, T-shirt, tee shirt +jigsaw puzzle +jinrikisha, ricksha, rickshaw +joystick +kimono +knee pad +knot +lab coat, laboratory coat +ladle +lampshade, lamp shade +laptop, laptop computer +lawn mower, mower +lens cap, lens cover +letter opener, paper knife, paperknife +library +lifeboat +lighter, light, igniter, ignitor +limousine, limo +liner, ocean liner +lipstick, lip rouge +Loafer +lotion +loudspeaker, speaker, speaker unit, loudspeaker system, speaker system +loupe, jeweler's loupe +lumbermill, sawmill +magnetic compass +mailbag, postbag +mailbox, letter box +maillot +maillot, tank suit +manhole cover +maraca +marimba, xylophone +mask +matchstick +maypole +maze, labyrinth +measuring cup +medicine chest, medicine cabinet +megalith, megalithic structure +microphone, mike +microwave, microwave oven +military uniform +milk can +minibus +miniskirt, mini +minivan +missile +mitten +mixing bowl +mobile home, manufactured home +Model T +modem +monastery +monitor +moped +mortar +mortarboard +mosque +mosquito net +motor scooter, scooter +mountain bike, all-terrain bike, off-roader +mountain tent +mouse, computer mouse +mousetrap +moving van +muzzle +nail +neck brace +necklace +nipple +notebook, notebook computer +obelisk +oboe, 
hautboy, hautbois +ocarina, sweet potato +odometer, hodometer, mileometer, milometer +oil filter +organ, pipe organ +oscilloscope, scope, cathode-ray oscilloscope, CRO +overskirt +oxcart +oxygen mask +packet +paddle, boat paddle +paddlewheel, paddle wheel +padlock +paintbrush +pajama, pyjama, pj's, jammies +palace +panpipe, pandean pipe, syrinx +paper towel +parachute, chute +parallel bars, bars +park bench +parking meter +passenger car, coach, carriage +patio, terrace +pay-phone, pay-station +pedestal, plinth, footstall +pencil box, pencil case +pencil sharpener +perfume, essence +Petri dish +photocopier +pick, plectrum, plectron +pickelhaube +picket fence, paling +pickup, pickup truck +pier +piggy bank, penny bank +pill bottle +pillow +ping-pong ball +pinwheel +pirate, pirate ship +pitcher, ewer +plane, carpenter's plane, woodworking plane +planetarium +plastic bag +plate rack +plow, plough +plunger, plumber's helper +Polaroid camera, Polaroid Land camera +pole +police van, police wagon, paddy wagon, patrol wagon, wagon, black Maria +poncho +pool table, billiard table, snooker table +pop bottle, soda bottle +pot, flowerpot +potter's wheel +power drill +prayer rug, prayer mat +printer +prison, prison house +projectile, missile +projector +puck, hockey puck +punching bag, punch bag, punching ball, punchball +purse +quill, quill pen +quilt, comforter, comfort, puff +racer, race car, racing car +racket, racquet +radiator +radio, wireless +radio telescope, radio reflector +rain barrel +recreational vehicle, RV, R.V. +reel +reflex camera +refrigerator, icebox +remote control, remote +restaurant, eating house, eating place, eatery +revolver, six-gun, six-shooter +rifle +rocking chair, rocker +rotisserie +rubber eraser, rubber, pencil eraser +rugby ball +rule, ruler +running shoe +safe +safety pin +saltshaker, salt shaker +sandal +sarong +sax, saxophone +scabbard +scale, weighing machine +school bus +schooner +scoreboard +screen, CRT screen +screw +screwdriver +seat belt, seatbelt +sewing machine +shield, buckler +shoe shop, shoe-shop, shoe store +shoji +shopping basket +shopping cart +shovel +shower cap +shower curtain +ski +ski mask +sleeping bag +slide rule, slipstick +sliding door +slot, one-armed bandit +snorkel +snowmobile +snowplow, snowplough +soap dispenser +soccer ball +sock +solar dish, solar collector, solar furnace +sombrero +soup bowl +space bar +space heater +space shuttle +spatula +speedboat +spider web, spider's web +spindle +sports car, sport car +spotlight, spot +stage +steam locomotive +steel arch bridge +steel drum +stethoscope +stole +stone wall +stopwatch, stop watch +stove +strainer +streetcar, tram, tramcar, trolley, trolley car +stretcher +studio couch, day bed +stupa, tope +submarine, pigboat, sub, U-boat +suit, suit of clothes +sundial +sunglass +sunglasses, dark glasses, shades +sunscreen, sunblock, sun blocker +suspension bridge +swab, swob, mop +sweatshirt +swimming trunks, bathing trunks +swing +switch, electric switch, electrical switch +syringe +table lamp +tank, army tank, armored combat vehicle, armoured combat vehicle +tape player +teapot +teddy, teddy bear +television, television system +tennis ball +thatch, thatched roof +theater curtain, theatre curtain +thimble +thresher, thrasher, threshing machine +throne +tile roof +toaster +tobacco shop, tobacconist shop, tobacconist +toilet seat +torch +totem pole +tow truck, tow car, wrecker +toyshop +tractor +trailer truck, tractor trailer, trucking rig, rig, articulated lorry, semi +tray +trench coat +tricycle, 
trike, velocipede +trimaran +tripod +triumphal arch +trolleybus, trolley coach, trackless trolley +trombone +tub, vat +turnstile +typewriter keyboard +umbrella +unicycle, monocycle +upright, upright piano +vacuum, vacuum cleaner +vase +vault +velvet +vending machine +vestment +viaduct +violin, fiddle +volleyball +waffle iron +wall clock +wallet, billfold, notecase, pocketbook +wardrobe, closet, press +warplane, military plane +washbasin, handbasin, washbowl, lavabo, wash-hand basin +washer, automatic washer, washing machine +water bottle +water jug +water tower +whiskey jug +whistle +wig +window screen +window shade +Windsor tie +wine bottle +wing +wok +wooden spoon +wool, woolen, woollen +worm fence, snake fence, snake-rail fence, Virginia fence +wreck +yawl +yurt +web site, website, internet site, site +comic book +crossword puzzle, crossword +street sign +traffic light, traffic signal, stoplight +book jacket, dust cover, dust jacket, dust wrapper +menu +plate +guacamole +consomme +hot pot, hotpot +trifle +ice cream, icecream +ice lolly, lolly, lollipop, popsicle +French loaf +bagel, beigel +pretzel +cheeseburger +hotdog, hot dog, red hot +mashed potato +head cabbage +broccoli +cauliflower +zucchini, courgette +spaghetti squash +acorn squash +butternut squash +cucumber, cuke +artichoke, globe artichoke +bell pepper +cardoon +mushroom +Granny Smith +strawberry +orange +lemon +fig +pineapple, ananas +banana +jackfruit, jak, jack +custard apple +pomegranate +hay +carbonara +chocolate sauce, chocolate syrup +dough +meat loaf, meatloaf +pizza, pizza pie +potpie +burrito +red wine +espresso +cup +eggnog +alp +bubble +cliff, drop, drop-off +coral reef +geyser +lakeside, lakeshore +promontory, headland, head, foreland +sandbar, sand bar +seashore, coast, seacoast, sea-coast +valley, vale +volcano +ballplayer, baseball player +groom, bridegroom +scuba diver +rapeseed +daisy +yellow lady's slipper, yellow lady-slipper, Cypripedium calceolus, Cypripedium parviflorum +corn +acorn +hip, rose hip, rosehip +buckeye, horse chestnut, conker +coral fungus +agaric +gyromitra +stinkhorn, carrion fungus +earthstar +hen-of-the-woods, hen of the woods, Polyporus frondosus, Grifola frondosa +bolete +ear, spike, capitulum +toilet tissue, toilet paper, bathroom tissue diff --git a/mobilenetv1/freeze_graph.py b/mobilenetv1/freeze_graph.py new file mode 100644 index 0000000..b4ea0cb --- /dev/null +++ b/mobilenetv1/freeze_graph.py @@ -0,0 +1,87 @@ +import tensorflow as tf +import os +import re +import sys +import argparse +import base_func +import importlib +import models +from tensorflow.python.framework import graph_util + +def freeze_graph(model_def,input_dir, output_graph): + sess = tf.InteractiveSession() + meta_file, ckpt_file = base_func.get_model_filenames(input_dir) + + network = importlib.import_module(model_def) + + images_placeholder = tf.placeholder(tf.float32,shape=(None,224,224,3),name='input') + + logits, _ = network.inference(images_placeholder, keep_probability=0, + phase_train=False, class_num=1000) + + + ckpt_dir_exp = os.path.expanduser(input_dir) + + + meta_file = os.path.join(ckpt_dir_exp, meta_file) + ckpt_file = os.path.join(ckpt_dir_exp, ckpt_file) + + print("meta-file is %s" % meta_file) + + + saver = tf.train.Saver(tf.global_variables()) + + + graph = tf.get_default_graph() # 获得默认的图 + input_graph_def = graph.as_graph_def() # 返回一个序列化的图代表当前的图 + + + output_node_names = "MobileNetV1/Bottleneck2/BatchNorm/Reshape_1" + + + with tf.Session() as sess: + + saver.restore(sess, ckpt_file) #恢复图并得到数据 
+        # sess.run(embeddings,feed_dict=feed_dict)
+
+
+        # fix batch norm nodes
+        for node in input_graph_def.node:
+            if node.op == 'RefSwitch':
+                node.op = 'Switch'
+                for index in range(len(node.input)):
+                    if 'moving_' in node.input[index]:
+                        node.input[index] = node.input[index] + '/read'
+            elif node.op == 'AssignSub':
+                node.op = 'Sub'
+                if 'use_locking' in node.attr: del node.attr['use_locking']
+            elif node.op == 'AssignAdd':
+                node.op = 'Add'
+                if 'use_locking' in node.attr: del node.attr['use_locking']
+
+
+        output_graph_def = graph_util.convert_variables_to_constants(
+            sess=sess,
+            input_graph_def=input_graph_def,
+            output_node_names=output_node_names.split(","))  # if there are multiple output nodes, separate them with commas
+        with tf.gfile.GFile(output_graph, "wb") as f:  # save the model
+            f.write(output_graph_def.SerializeToString())  # serialize and write it out
+        print("%d ops in the final graph." % len(output_graph_def.node))  # number of op nodes in the final graph
+        # for op in graph.get_operations():
+        #     print(op.name, op.values())
+
+
+def main(args):
+    freeze_graph(args.model_def,args.ckpt_dir,args.output_file)
+
+def parse_arguments(argv):
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--model_def', type=str,
+        help='Model definition. Points to a module containing the definition of the inference graph.', default='models.mobilenet_v1')
+    parser.add_argument('ckpt_dir', type=str,
+        help='Directory containing the metagraph (.meta) file and the checkpoint (ckpt) file containing model parameters')
+    parser.add_argument('output_file', type=str,
+        help='Filename for the exported graphdef protobuf (.pb)')
+    return parser.parse_args(argv)
+
+
+if __name__ == '__main__':
+    main(parse_arguments(sys.argv[1:]))
diff --git a/mobilenetv1/models/__init__.py b/mobilenetv1/models/__init__.py
new file mode 100644
index 0000000..efa6252
--- /dev/null
+++ b/mobilenetv1/models/__init__.py
@@ -0,0 +1,2 @@
+# flake8: noqa
+
diff --git a/mobilenetv1/models/mobilenet_v1.py b/mobilenetv1/models/mobilenet_v1.py
new file mode 100644
index 0000000..6ca99d0
--- /dev/null
+++ b/mobilenetv1/models/mobilenet_v1.py
@@ -0,0 +1,159 @@
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import tensorflow as tf
+import tensorflow.contrib.slim as slim
+
+def inference(images, keep_probability, phase_train=True,
+              class_num=1000, weight_decay=0.0, reuse=None):
+    batch_norm_params = {
+        'decay': 0.995,
+        'epsilon': 0.001,
+        'updates_collections': None,
+        'variables_collections': [ tf.GraphKeys.TRAINABLE_VARIABLES ],
+}
+    with slim.arg_scope([slim.conv2d, slim.separable_conv2d,slim.fully_connected],
+                        weights_initializer=slim.initializers.xavier_initializer(),
+                        weights_regularizer=slim.l2_regularizer(weight_decay),
+                        normalizer_fn=slim.batch_norm,
+                        normalizer_params=batch_norm_params):
+        return mobilenet_v1(images, is_training=phase_train,
+                            dropout_keep_prob=keep_probability, class_num=class_num, reuse=reuse)
+
+
+
+def block_14x14(net, outputs, scope=None, reuse=None):
+    with tf.variable_scope(scope, "block-14x14", reuse=reuse):
+        net = slim.separable_conv2d(net, None, [3, 3],
+                                    depth_multiplier=1,
+                                    stride=1,
+                                    padding='SAME',
+                                    normalizer_fn=slim.batch_norm,
+                                    scope='dw_Conv2d')
+        net = slim.conv2d(net, outputs, 1, stride=1, padding='SAME',normalizer_fn=slim.batch_norm,scope='conv')
+    return net
+
+def mobilenet_v1(inputs, is_training=True,
+                 dropout_keep_prob=0.5,
+                 class_num=1000,
+                 reuse=None,
+                 scope='MobileNetV1'):
+    end_points = {}
+    net = None
+    _l = 0
+    with tf.variable_scope(scope, 'MobileNetV1', [inputs], reuse=reuse):
+        with
slim.arg_scope([slim.batch_norm, slim.dropout], + is_training=is_training): + with slim.arg_scope([slim.conv2d,slim.separable_conv2d], + stride=1, padding='SAME',normalizer_fn=slim.batch_norm): + inputs = tf.space_to_batch(inputs,[[1,1],[1,1]],block_size=1,name=None) + # ------------------------x224------------------------- # + net = slim.conv2d(inputs, 32, 3, stride=2,padding='VALID',normalizer_fn=slim.batch_norm, + scope='Conv2d_%i_3x3'%(_l)) + _l += 1 + # ------------------------x112------------------------- # + net = slim.separable_conv2d(net, None, [3, 3], + depth_multiplier=1, + stride=1, + normalizer_fn=slim.batch_norm, + scope='dw_Conv2d_%i_3x3'%(_l)) + _l += 1 + net = slim.conv2d(net, 64, 1, stride=1, padding='SAME',normalizer_fn=slim.batch_norm, + scope='Conv2d_%i_1x1'%(_l)) + _l += 1 + net = tf.space_to_batch(net,[[1,1],[1,1]],block_size=1,name=None) + net = slim.separable_conv2d(net, None, [3, 3], + depth_multiplier=1, + stride=2, + padding='VALID', + normalizer_fn=slim.batch_norm, + scope='dw_Conv2d_%i_3x3'%(_l)) + _l += 1 + # ------------------------x56-------------------------- # + net = slim.conv2d(net, 128, 1, stride=1, padding='SAME',normalizer_fn=slim.batch_norm, + scope='Conv2d_%i_1x1'%(_l)) + _l += 1 + net = slim.separable_conv2d(net, None, [3, 3], + depth_multiplier=1, + stride=1, + normalizer_fn=slim.batch_norm, + scope='dw_Conv2d_%i_3x3'%(_l)) + _l += 1 + net = slim.conv2d(net, 128, 1, stride=1, padding='SAME',normalizer_fn=slim.batch_norm, + scope='Conv2d_%i_1x1'%(_l)) + _l += 1 + net = tf.space_to_batch(net,[[1,1],[1,1]],block_size=1,name=None) + net = slim.separable_conv2d(net, None, [3, 3], + depth_multiplier=1, + stride=2, + padding='VALID', + normalizer_fn=slim.batch_norm, + scope='dw_Conv2d_%i_3x3'%(_l)) + _l += 1 + # ------------------------x28-------------------------- # + net = slim.conv2d(net, 256, 1, stride=1, padding='SAME',normalizer_fn=slim.batch_norm, + scope='Conv2d_%i_1x1'%(_l)) + _l += 1 + net = slim.separable_conv2d(net, None, [3, 3], + depth_multiplier=1, + stride=1, + normalizer_fn=slim.batch_norm, + scope='dw_Conv2d_%i_3x3'%(_l)) + _l += 1 + net = slim.conv2d(net, 256, 1, stride=1, padding='SAME',normalizer_fn=slim.batch_norm, + scope='Conv2d_%i_1x1'%(_l)) + _l += 1 + net = tf.space_to_batch(net,[[1,1],[1,1]],block_size=1,name=None) + net = slim.separable_conv2d(net, None, [3, 3], + depth_multiplier=1, + stride=2, + padding='VALID', + normalizer_fn=slim.batch_norm, + scope='dw_Conv2d_%i_3x3'%(_l)) + _l += 1 + # ------------------------x14-------------------------- # + net = slim.conv2d(net, 512, 1, stride=1, padding='SAME',normalizer_fn=slim.batch_norm, + scope='Conv2d_%i_1x1'%(_l)) + _l += 1 + + with tf.variable_scope(scope,'block_repeat_%i'%(_l)): + for _k in range(5): + net = block_14x14(net,512) + _l += 1 + + net = tf.space_to_batch(net,[[1,1],[1,1]],block_size=1,name=None) + net = slim.separable_conv2d(net, None, [3, 3], + depth_multiplier=1, + stride=2, + padding='VALID', + normalizer_fn=slim.batch_norm, + scope='dw_Conv2d_%i_3x3'%(_l)) + _l += 1 + # -------------------------x7-------------------------- # + net = slim.conv2d(net, 1024, 1, stride=1, padding='SAME',normalizer_fn=slim.batch_norm, + scope='Conv2d_%i_1x1'%(_l)) + _l += 1 + net = slim.separable_conv2d(net, None, [3, 3], + depth_multiplier=1, + stride=1, + padding='SAME', + normalizer_fn=slim.batch_norm, + scope='dw_Conv2d_%i_3x3'%(_l)) + _l += 1 + net = slim.conv2d(net, 1024, 1, stride=1, padding='SAME',normalizer_fn=slim.batch_norm, + scope='Conv2d_%i_1x1'%(_l)) + _l += 1 + # 
---------------------softmax out---------------------- # + net = slim.avg_pool2d(net, net.get_shape()[1:3], padding='VALID', + scope='AvgPool_%i'%(_l)) + _l += 1 + + net = slim.flatten(net) + net = slim.dropout(net, dropout_keep_prob, is_training=is_training, + scope='Dropout_1') + + net = slim.fully_connected(net, class_num, activation_fn=None, + scope='Bottleneck2', reuse=False) + + return net, None diff --git a/mobilenetv1/predict_one_pic.py b/mobilenetv1/predict_one_pic.py new file mode 100644 index 0000000..c5ed9ca --- /dev/null +++ b/mobilenetv1/predict_one_pic.py @@ -0,0 +1,91 @@ +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +from datetime import datetime +import os.path +from os import environ +import time +import sys +import random +import tensorflow as tf +import numpy as np +import importlib +import argparse +import base_func +import h5py +import math +import tensorflow.contrib.slim as slim +from tensorflow.python.ops import data_flow_ops +from tensorflow.python.framework import ops +from tensorflow.python.ops import array_ops +import pickle +from scipy import misc + +def _softmax(x, axis=-1, t=-100.): + x = x - np.max(x) + if np.min(x) < t: + x = x / np.min(x) * t + e_x = np.exp(x) + return e_x / e_x.sum(axis, keepdims=True) + +def main(args): + + image_size = (args.image_size, args.image_size) + + top1 = 0.0 + top5 = 0.0 + + with tf.Graph().as_default() as graph: + with tf.Session() as sess: + base_func.load_model(args.model) + + input_image = sess.graph.get_tensor_by_name('input:0') + output = sess.graph.get_tensor_by_name('MobileNetV1/Bottleneck2/BatchNorm/Reshape_1:0') + + if (os.path.isdir(args.model)): + phase_train_placeholder = tf.get_default_graph().get_tensor_by_name("phase_train:0") + + img = np.array(misc.imread(args.image, mode='RGB')) + # img = base_func.crop(img, False, args.image_size) + img = misc.imresize(img, image_size, interp='bilinear') + img = img / 255. + images = [img] + + feed_dict={input_image:images} + if (os.path.isdir(args.model)): + feed_dict={input_image:images,phase_train_placeholder:False} + + logits = sess.run(output,feed_dict=feed_dict) + pred = _softmax(logits[0,:]) + + des_idx = np.argsort(pred) + + with open("data/names.list","r") as f: + lines = f.readlines() + # with open("data/names2.list","w") as f1: + # for k in range(1000): + # f1.writelines(lines[k].split(":")[1]) + + for j in range(5): + print("%.2f%%--%s" % (pred[des_idx[999-j]]*100,lines[des_idx[999-j]].strip())) + + + +def parse_arguments(argv): + parser = argparse.ArgumentParser() + + parser.add_argument('model', type=str, + help='Model definition. 
Points to a module containing the definition of the inference graph.', default='models.inception_resnet_v1') + parser.add_argument('image', type=str, + help='Path to the data directory containing aligned face patches.', + default='data/eagle.jpg') + parser.add_argument('--image_size', type=int, + help='image size.', default=224) + + return parser.parse_args(argv) + + +if __name__ == '__main__': + args = parse_arguments(sys.argv[1:]) + main(args) diff --git a/mobilenetv1/run_mobilenet_v1.sh b/mobilenetv1/run_mobilenet_v1.sh new file mode 100755 index 0000000..7c47ba9 --- /dev/null +++ b/mobilenetv1/run_mobilenet_v1.sh @@ -0,0 +1,20 @@ +#!/bin/bash + +python3 train_softmax.py \ +--model_def models.mobilenet_v1 \ +--data_dir /data/datasets/ImageNet2012/ILSVRC2012_img_train/ \ +--pretrained_model "../pretrained/mobilenetv1_1.0.pb"\ +--gpu_memory_fraction 0.85 \ +--gpus 1 \ +--image_size 224 \ +--logs_base_dir backup_classifier \ +--models_base_dir backup_classifier \ +--batch_size 100 \ +--epoch_size 5000 \ +--learning_rate -1 \ +--max_nrof_epochs 50 \ +--class_num 1000 \ +--use_fixed_image_standardization \ +--optimizer MOM \ +--learning_rate_schedule_file data/learning_rate.txt \ +--keep_probability 1.0 diff --git a/mobilenetv1/train_softmax.py b/mobilenetv1/train_softmax.py new file mode 100644 index 0000000..a809f50 --- /dev/null +++ b/mobilenetv1/train_softmax.py @@ -0,0 +1,322 @@ +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +from datetime import datetime +import os.path +from os import environ +import time +import sys +import random +import tensorflow as tf +import numpy as np +import importlib +import argparse +import base_func +import h5py +import math +import tensorflow.contrib.slim as slim +from tensorflow.python.ops import data_flow_ops +from tensorflow.python.framework import ops +from tensorflow.python.ops import array_ops +import pickle +from scipy import misc + +def main(args): + environ['CUDA_VISIBLE_DEVICES'] = args.gpus + network = importlib.import_module(args.model_def) + image_size = (args.image_size, args.image_size) + + subdir = datetime.strftime(datetime.now(), '%Y%m%d-%H%M%S') + log_dir = os.path.join(os.path.expanduser(args.logs_base_dir), subdir) + if not os.path.isdir(log_dir): # Create the log directory if it doesn't exist + os.makedirs(log_dir) + model_dir = os.path.join(os.path.expanduser(args.models_base_dir), subdir) + if not os.path.isdir(model_dir): # Create the model directory if it doesn't exist + os.makedirs(model_dir) + + # Write arguments to a text file + base_func.write_arguments_to_file(args, os.path.join(log_dir, 'arguments.txt')) + + np.random.seed(seed=args.seed) + random.seed(args.seed) + dataset = base_func.get_dataset(args.data_dir) + + train_set, val_set = dataset, [] + + nrof_classes = len(train_set) + + print('Model directory: %s' % model_dir) + print('Log directory: %s' % log_dir) + pretrained_model = None + if args.pretrained_model: + pretrained_model = os.path.expanduser(args.pretrained_model) + print('Pre-trained model: %s' % pretrained_model) + + + with tf.Graph().as_default(): + tf.set_random_seed(args.seed) + global_step = tf.Variable(0, trainable=False) + + # Get a list of image paths and their labels + image_list, label_list = base_func.get_image_paths_and_labels(train_set) + + assert len(image_list)>0, 'The training set should not be empty' + + val_image_list, val_label_list = base_func.get_image_paths_and_labels(val_set) + + # Create a queue that produces indices 
into the image_list and label_list + labels = ops.convert_to_tensor(label_list, dtype=tf.int32) + range_size = array_ops.shape(labels)[0] + index_queue = tf.train.range_input_producer(range_size, num_epochs=None, + shuffle=True, seed=None, capacity=32) + + index_dequeue_op = index_queue.dequeue_many(args.batch_size*args.epoch_size, 'index_dequeue') + + learning_rate_placeholder = tf.placeholder(tf.float32, name='learning_rate') + batch_size_placeholder = tf.placeholder(tf.int32, name='batch_size') + phase_train_placeholder = tf.placeholder(tf.bool, name='phase_train') + image_paths_placeholder = tf.placeholder(tf.string, shape=(None,1), name='image_paths') + labels_placeholder = tf.placeholder(tf.int32, shape=(None,1), name='labels') + control_placeholder = tf.placeholder(tf.int32, shape=(None,1), name='control') + + nrof_preprocess_threads = 4 + input_queue = data_flow_ops.FIFOQueue(capacity=2000000, + dtypes=[tf.string, tf.int32, tf.int32], + shapes=[(1,), (1,), (1,)], + shared_name=None, name=None) + enqueue_op = input_queue.enqueue_many([image_paths_placeholder, labels_placeholder, control_placeholder], name='enqueue_op') + image_batch, label_batch = base_func.create_input_pipeline(input_queue, image_size, nrof_preprocess_threads, batch_size_placeholder) + + image_batch = tf.identity(image_batch, 'image_batch') + image_batch = tf.identity(image_batch, 'input') + label_batch = tf.identity(label_batch, 'label_batch') + + print('Number of classes in training set: %d' % nrof_classes) + print('Number of examples in training set: %d' % len(image_list)) + + print('Number of classes in validation set: %d' % len(val_set)) + print('Number of examples in validation set: %d' % len(val_image_list)) + + print('Building training graph') + + # Build the inference graph + logits, _ = network.inference(image_batch, args.keep_probability, + phase_train=phase_train_placeholder, class_num=args.class_num, + weight_decay=args.weight_decay) + + prelogits = logits + + print('class_num=%d' % len(train_set)) + + learning_rate = tf.train.exponential_decay(learning_rate_placeholder, global_step, + args.learning_rate_decay_epochs*args.epoch_size, args.learning_rate_decay_factor, staircase=True) + tf.summary.scalar('learning_rate', learning_rate) + + # Calculate the average cross entropy loss across the batch + cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits( + labels=label_batch, logits=logits, name='cross_entropy_per_example') + cross_entropy_mean = tf.reduce_mean(cross_entropy, name='cross_entropy') + tf.add_to_collection('losses', cross_entropy_mean) + + correct_prediction = tf.cast(tf.equal(tf.argmax(logits, 1), tf.cast(label_batch, tf.int64)), tf.float32) + accuracy = tf.reduce_mean(correct_prediction) + + total_loss = tf.add_n([cross_entropy_mean],name='total_loss') + + # Build a Graph that trains the model with one batch of examples and updates the model parameters + train_op = base_func.train(total_loss, global_step, args.optimizer, + learning_rate, args.moving_average_decay, tf.global_variables(), args.log_histograms) + + # Create a saver + var_list = tf.trainable_variables() + g_list = tf.global_variables() + bn_moving_vars = [g for g in g_list if 'moving_mean' in g.name] + bn_moving_vars += [g for g in g_list if 'moving_variance' in g.name] + # var_list += bn_moving_vars + var_list = list(set(var_list+bn_moving_vars)) + + saver = tf.train.Saver(var_list=var_list, max_to_keep=10) + + if pretrained_model: + saver_restore = tf.train.Saver(var_list=var_list) + + # Start running operations on 
the Graph. + gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=args.gpu_memory_fraction) + sess = tf.Session(config=tf.ConfigProto(gpu_options=gpu_options, log_device_placement=False)) + sess.run(tf.global_variables_initializer()) + sess.run(tf.local_variables_initializer()) + summary_writer = tf.summary.FileWriter(log_dir, sess.graph) + coord = tf.train.Coordinator() + tf.train.start_queue_runners(coord=coord, sess=sess) + + with sess.as_default(): + + if pretrained_model: + print('Restoring pretrained model: %s' % pretrained_model) + saver_restore.restore(sess, tf.train.latest_checkpoint(pretrained_model)) + + # Training and validation loop + print('Running training') + nrof_steps = args.max_nrof_epochs*args.epoch_size + + for epoch in range(1,args.max_nrof_epochs+1): + step = sess.run(global_step, feed_dict=None) + # Train for one epoch + t = time.time() + cont = train(args, sess, epoch, image_list, label_list, index_dequeue_op, enqueue_op, image_paths_placeholder, labels_placeholder, + learning_rate_placeholder, phase_train_placeholder, batch_size_placeholder, control_placeholder, global_step, + total_loss, train_op, args.learning_rate_schedule_file, + cross_entropy_mean, accuracy, learning_rate, + prelogits, args.random_rotate, args.random_crop, args.random_flip, args.use_fixed_image_standardization) + # stat['time_train'][epoch-1] = time.time() - t + + if not cont: + break + + # Save variables and the metagraph if it doesn't exist already + save_variables_and_metagraph(sess, saver, summary_writer, model_dir, subdir, epoch) + + return model_dir + + +def train(args, sess, epoch, image_list, label_list, index_dequeue_op, enqueue_op, image_paths_placeholder, labels_placeholder, + learning_rate_placeholder, phase_train_placeholder, batch_size_placeholder, control_placeholder, step, + loss, train_op, learning_rate_schedule_file, + cross_entropy_mean, accuracy, + learning_rate, prelogits, random_rotate, random_crop, random_flip, use_fixed_image_standardization): + batch_number = 0 + + if args.learning_rate>0.0: + lr = args.learning_rate + else: + lr = base_func.get_learning_rate_from_file(learning_rate_schedule_file, epoch) + + if lr<=0: + return False + + index_epoch = sess.run(index_dequeue_op) + label_epoch = np.array(label_list)[index_epoch] + image_epoch = np.array(image_list)[index_epoch] + + # Enqueue one epoch of image paths and labels + labels_array = np.expand_dims(np.array(label_epoch),1) + image_paths_array = np.expand_dims(np.array(image_epoch),1) + control_value = base_func.RANDOM_ROTATE * random_rotate + base_func.RANDOM_CROP * random_crop + base_func.RANDOM_FLIP * random_flip + base_func.FIXED_STANDARDIZATION * use_fixed_image_standardization + print('use_fixed_image_standardization=%d' % use_fixed_image_standardization) + control_array = np.ones_like(labels_array) * control_value + sess.run(enqueue_op, {image_paths_placeholder: image_paths_array, labels_placeholder: labels_array, control_placeholder: control_array}) + + # Training loop + train_time = 0 + while batch_number < args.epoch_size: + start_time = time.time() + feed_dict = {learning_rate_placeholder: lr, phase_train_placeholder:True, batch_size_placeholder:args.batch_size} + tensor_list = [loss, train_op, step, prelogits, cross_entropy_mean, learning_rate, accuracy] + + loss_, _, step_, prelogits_, cross_entropy_mean_, lr_, accuracy_ = sess.run(tensor_list, feed_dict=feed_dict) + + duration = time.time() - start_time + print('Epoch: [%d][%d/%d]\tTime %.3f\tLoss %2.3f\tXent %2.3f\tAccuracy %2.3f\tLr %2.5f' 
% + (epoch, batch_number+1, args.epoch_size, duration, loss_, cross_entropy_mean_, accuracy_, lr_ )) + batch_number += 1 + train_time += duration + + return True + +def save_variables_and_metagraph(sess, saver, summary_writer, model_dir, model_name, step): + # Save the model checkpoint + print('Saving variables') + start_time = time.time() + checkpoint_path = os.path.join(model_dir, 'model-%s.ckpt' % model_name) + saver.save(sess, checkpoint_path, global_step=step, write_meta_graph=False) + save_time_variables = time.time() - start_time + print('Variables saved in %.2f seconds' % save_time_variables) + metagraph_filename = os.path.join(model_dir, 'model-%s.meta' % model_name) + save_time_metagraph = 0 + if not os.path.exists(metagraph_filename): + print('Saving metagraph') + start_time = time.time() + saver.export_meta_graph(metagraph_filename) + save_time_metagraph = time.time() - start_time + print('Metagraph saved in %.2f seconds' % save_time_metagraph) + summary = tf.Summary() + #pylint: disable=maybe-no-member + summary.value.add(tag='time/save_variables', simple_value=save_time_variables) + summary.value.add(tag='time/save_metagraph', simple_value=save_time_metagraph) + summary_writer.add_summary(summary, step) + + +def parse_arguments(argv): + parser = argparse.ArgumentParser() + + parser.add_argument('--logs_base_dir', type=str, + help='Directory where to write event logs.', default='~/logs/base_func') + parser.add_argument('--models_base_dir', type=str, + help='Directory where to write trained models and checkpoints.', default='~/models/base_func') + parser.add_argument('--gpu_memory_fraction', type=float, + help='Upper bound on the amount of GPU memory that will be used by the process.', default=1.0) + parser.add_argument('--gpus', type=str, + help='Indicate the GPUs to be used.', default='2') + + parser.add_argument('--pretrained_model', type=str, + help='Load a pretrained model before training starts.') + + parser.add_argument('--class_num_changed', type=bool, default=False, + help='indicate if the class_num is different from pretrained.') + parser.add_argument('--data_dir', type=str, + help='Path to the data directory containing aligned face patches.', + default='~/datasets/casia/casia_maxpy_mtcnnalign_182_160') + parser.add_argument('--model_def', type=str, + help='Model definition. Points to a module containing the definition of the inference graph.', default='models.inception_resnet_v1') + parser.add_argument('--max_nrof_epochs', type=int, + help='Number of epochs to run.', default=20) + parser.add_argument('--batch_size', type=int, + help='Number of images to process in a batch.', default=100) + parser.add_argument('--image_size', type=int, + help='Image size (height, width) in pixels.', default=224) + parser.add_argument('--epoch_size', type=int, + help='Number of batches per epoch.', default=5000) + parser.add_argument('--class_num', type=int, + help='Dimensionality of the embedding.', default=1000) + parser.add_argument('--random_crop', + help='Performs random cropping of training images. If false, the center image_size pixels from the training images are used. 
' + + 'If the size of the images in the data directory is equal to image_size no cropping is performed', action='store_true') + parser.add_argument('--random_flip', + help='Performs random horizontal flipping of training images.', action='store_true') + parser.add_argument('--random_rotate', + help='Performs random rotations of training images.', action='store_true') + parser.add_argument('--use_fixed_image_standardization', + help='Performs fixed standardization of images.', action='store_true') + parser.add_argument('--keep_probability', type=float, + help='Keep probability of dropout for the fully connected layer(s).', default=1.0) + parser.add_argument('--weight_decay', type=float, + help='L2 weight regularization.', default=0.0) + parser.add_argument('--optimizer', type=str, choices=['ADAGRAD', 'ADADELTA', 'ADAM', 'RMSPROP', 'MOM'], + help='The optimization algorithm to use', default='ADAGRAD') + parser.add_argument('--learning_rate', type=float, + help='Initial learning rate. If set to a negative value a learning rate ' + + 'schedule can be specified in the file "learning_rate_schedule.txt"', default=0.1) + parser.add_argument('--learning_rate_decay_epochs', type=int, + help='Number of epochs between learning rate decay.', default=100) + parser.add_argument('--learning_rate_decay_factor', type=float, + help='Learning rate decay factor.', default=1.0) + parser.add_argument('--moving_average_decay', type=float, + help='Exponential decay for tracking of training parameters.', default=0.9999) + parser.add_argument('--seed', type=int, + help='Random seed.', default=666) + parser.add_argument('--nrof_preprocess_threads', type=int, + help='Number of preprocessing (data loading and augmentation) threads.', default=4) + parser.add_argument('--log_histograms', + help='Enables logging of weight/bias histograms in tensorboard.', action='store_true') + parser.add_argument('--learning_rate_schedule_file', type=str, + help='File containing the learning rate schedule that is used when learning_rate is set to to -1.', default='data/learning_rate_schedule.txt') + + + return parser.parse_args(argv) + + +if __name__ == '__main__': + args = parse_arguments(sys.argv[1:]) + print('gpu device ID: %s'%args.gpus) + main(args) diff --git a/mobilenetv1/validation_imagenet.py b/mobilenetv1/validation_imagenet.py new file mode 100644 index 0000000..84a2238 --- /dev/null +++ b/mobilenetv1/validation_imagenet.py @@ -0,0 +1,110 @@ +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +from datetime import datetime +import os.path +from os import environ +import time +import sys +import random +import tensorflow as tf +import numpy as np +import importlib +import argparse +import base_func +import h5py +import math +import tensorflow.contrib.slim as slim +from tensorflow.python.ops import data_flow_ops +from tensorflow.python.framework import ops +from tensorflow.python.ops import array_ops +import pickle +from scipy import misc + +def _softmax(x, axis=-1, t=-100.): + x = x - np.max(x) + if np.min(x) < t: + x = x / np.min(x) * t + e_x = np.exp(x) + return e_x / e_x.sum(axis, keepdims=True) + +def main(args): + environ['CUDA_VISIBLE_DEVICES'] = args.gpus + + image_size = (args.image_size, args.image_size) + + dataset = base_func.get_dataset(args.data_dir) + + val_set = dataset + image_list, label_list = base_func.get_image_paths_and_labels(val_set) + + nrof_classes = len(val_set) + val_image_num = len(image_list) + + top1 = 0.0 + top5 = 0.0 + + with 
tf.Graph().as_default() as graph: + with tf.Session() as sess: + base_func.load_model(args.model) + + input_image = sess.graph.get_tensor_by_name('input:0') + output = sess.graph.get_tensor_by_name('MobileNetV1/Bottleneck2/BatchNorm/Reshape_1:0') + + if (os.path.isdir(args.model)): + phase_train_placeholder = tf.get_default_graph().get_tensor_by_name("phase_train:0") + + + for i in range(val_image_num): + print(image_list[i]) + # images = base_func.load_data([image_list[i]], False, False, args.image_size) + img = np.array(misc.imread(image_list[i], mode='RGB')) + # img = base_func.crop(img, False, args.image_size) + img = misc.imresize(img, image_size, interp='bilinear') + img = img / 255. + images = [img] + + feed_dict={input_image:images} + if (os.path.isdir(args.model)): + feed_dict={input_image:images,phase_train_placeholder:False} + + logits = sess.run(output,feed_dict=feed_dict) + pred = _softmax(logits[0,:]) + # print(logits) + des_idx = np.argsort(pred) + # des_data = np.sort(logits) + # print(des_data[0,995:]) + + if (des_idx[nrof_classes-1]) == label_list[i]: + top1 += 1 + for j in range(5): + if (des_idx[nrof_classes-1-j]) == label_list[i]: + top5 += 1 + break + print("%05d th pic have been validated, top1 = %.2f%% top5 = %.2f%% " % (i+1,top1/(i+1)*100.,top5/(i+1)*100.)) + + +def parse_arguments(argv): + parser = argparse.ArgumentParser() + + parser.add_argument('model', type=str, + help='Model definition. ckpt folder or pb file', default='mobilenet_v1.pb') + parser.add_argument('data_dir', type=str, + help='Path to the data directory containing aligned face patches.', + default='~/datasets/iamgenet_val') + parser.add_argument('--image_size', type=int, + help='image size.', default=224) + parser.add_argument('--gpu_memory_fraction', type=float, + help='Upper bound on the amount of GPU memory that will be used by the process.', default=1.0) + parser.add_argument('--gpus', type=str, + help='Indicate the GPUs to be used.', default='3') + + + return parser.parse_args(argv) + + +if __name__ == '__main__': + args = parse_arguments(sys.argv[1:]) + print('gpu device ID: %s'%args.gpus) + main(args) diff --git a/pretrained/mobilenetv1_1.0.pb b/pretrained/mobilenetv1_1.0.pb new file mode 100644 index 0000000..849b226 Binary files /dev/null and b/pretrained/mobilenetv1_1.0.pb differ
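
Note on the _softmax helper shared by predict_one_pic.py and validation_imagenet.py above: it subtracts the row maximum before exponentiating (so the largest exponent is 0 and exp cannot overflow) and linearly rescales any logit more negative than t=-100 so the row does not underflow to all zeros. A minimal standalone sketch of the same idea, NumPy only; the example logits are made up:

import numpy as np

def stable_softmax(x, axis=-1, t=-100.0):
    # Shift by the max so the largest exponent is exactly 0 (no overflow in exp).
    x = x - np.max(x)
    # If any shifted logit is extremely negative, rescale so the smallest value is t;
    # this keeps exp(x) from underflowing to an all-zero row.
    if np.min(x) < t:
        x = x / np.min(x) * t
    e_x = np.exp(x)
    return e_x / e_x.sum(axis=axis, keepdims=True)

# Hypothetical logits with magnitudes that would overflow a naive exp():
print(stable_softmax(np.array([1000.0, 1001.0, 999.0])))  # ~[0.245, 0.665, 0.090]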
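
The top-1/top-5 bookkeeping in the validation loop of validation_imagenet.py (argsort the softmax output, then compare the highest-scoring indices against the ground-truth label) can be sanity-checked with a small self-contained sketch; the class count and scores below are made up:

import numpy as np

def topk_hit(pred, label, k):
    # Indices of the k highest scores; hit if the ground-truth label is among them.
    return int(label in np.argsort(pred)[-k:])

pred = np.array([0.05, 0.10, 0.20, 0.15, 0.50])  # hypothetical scores for 5 classes
label = 2                                        # ground-truth class index
print(topk_hit(pred, label, 1))  # 0 -> class 4 is the top-1 prediction
print(topk_hit(pred, label, 5))  # 1 -> within the top-5

This mirrors the loop above, where des_idx[nrof_classes-1] is the top-1 index and des_idx[nrof_classes-1-j] for j in 0..4 covers the top-5 candidates.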