bpatra / operation.qs (created December 6, 2020)
Operation in Q#
operation TwoBitstringSuperposition (qs : Qubit[], bits1 : Bool[], bits2 : Bool[]) : Unit {
    // Index of the first position where bits1 and bits2 differ; Length(qs) means "not found yet".
    mutable i0 = Length(qs);
    for (i in 0 .. Length(qs) - 1) {
        if (bits1[i] == bits2[i]) {
            // The two bit strings agree here: set the qubit to the common value.
            if (bits1[i]) { X(qs[i]); }
        } elif (i > i0) {
            // Later differing position: entangle it with the first differing qubit.
            CNOT(qs[i0], qs[i]);
            if (bits1[i] != bits1[i0]) { X(qs[i]); }
        } else {
            // First differing position: create the superposition and remember its index.
            H(qs[i]);
            set i0 = i;
        }
    }
}
---
layout: about/about_lyt
title: "A propos"
lang: fr
---
---
layout: about/about_lyt
title: "about"
lang: en
---
---
layout: default
t:
  first:
    en: "This is my first sentence"
    fr: "C'est ma première phrase"
  second:
    en: "My second sentence"
    fr: "Ma deuxième phrase"
  third:
├── _site
|   └── ...
├── _layouts
|   ├── about
|   |   └── about_lyt.html
|   └── hello_lyt.html
├── fr
|   ├── à propos
|   |   └── apropos.html
|   └── bonjour.html
├── about
|   └── about.html
└── hello.html
├── _config.yml
├── _layouts
|   ├── default.html
|   └── post.html
├── _site
|   └── ...
└── index.html
from keras import models
from inception_resnet_v1 import *

def model_with_inception_resnet_base(pretrained_weights):
    # Inception-ResNet v1 base network producing 128-dimensional face embeddings.
    model = InceptionResNetV1()
    if pretrained_weights:
        # Pre-trained FaceNet weights: https://drive.google.com/file/d/1971Xk5RwedbudGgTIrGAL4F7Aifu7id1/view?usp=sharing
        model.load_weights('facenet_weights.h5')
    # The classification head is stacked onto this empty Sequential (see the summary below).
    new_model = models.Sequential()
Layer (type)                 Output Shape              Param #
=================================================================
inception_resnet_v1 (Model)  (None, 128)               22808144
_________________________________________________________________
dense_1 (Dense)              (None, 256)               33024
_________________________________________________________________
dropout_1 (Dropout)          (None, 256)               0
_________________________________________________________________
dense_2 (Dense)              (None, 64)                16448
_________________________________________________________________
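The summary above fixes the shape of the head that sits on top of the 128-dimensional embedding: a 256-unit dense layer, a dropout layer, and a 64-unit dense layer. Below is a minimal sketch of how such a head could be stacked onto the base model; build_head is a hypothetical helper, and the ReLU activations and the 0.5 dropout rate are assumptions, since the summary only constrains the layer sizes.

from keras import models, layers
from inception_resnet_v1 import InceptionResNetV1

def build_head(base_model):
    # Hypothetical helper: stack the head from the summary on top of the 128-d embedding model.
    head = models.Sequential()
    head.add(base_model)                            # inception_resnet_v1 (Model), output (None, 128)
    head.add(layers.Dense(256, activation='relu'))  # 128 * 256 + 256 = 33024 params
    head.add(layers.Dropout(0.5))                   # dropout rate is an assumption
    head.add(layers.Dense(64, activation='relu'))   # 256 * 64 + 64 = 16448 params
    return head

new_model = build_head(InceptionResNetV1())
new_model.summary()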
# Collect the outputs of the first `depth` layers and build a model that returns all of them at once.
layer_outputs = [layer.output for layer in model.layers[:depth]]
activation_model = models.Model(inputs=model.input, outputs=layer_outputs)
# predict must be called on the activation model, not on the keras.models module.
predictions = activation_model.predict(input_img_tensor)
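Running the activation model returns one activation tensor per captured layer for the single input image. A minimal way to inspect the result, assuming the first captured layer is convolutional (so its activation is a 4-D tensor) and picking channel 4 purely for illustration:

import matplotlib.pyplot as plt

# Each entry of `predictions` is the activation of one layer for the single input image;
# for a convolutional layer it has shape (1, height, width, channels).
first_layer_activation = predictions[0]
print(first_layer_activation.shape)

# Display one (arbitrarily chosen) channel of the first layer's feature map.
plt.matshow(first_layer_activation[0, :, :, 4], cmap='viridis')
plt.show()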