For Homebrew v2.6.x and below:
brew cask install ngrok
For Homebrew v2.7.x and above (the `brew cask` subcommand was removed):
brew install --cask ngrok
// config/passport.js | |
// load all the things we need | |
var LocalStrategy = require('passport-local').Strategy; | |
var mysql = require('mysql'); | |
var connection = mysql.createConnection({ | |
host : 'localhost', | |
user : 'root', |
#!/bin/sh
# Build the components of an AWS S3 signed GET request (signature v2).
# NOTE(review): this appears to be only the prefix of the script -- the
# HMAC-SHA1 signing step and the curl call are expected to follow.
file=path/to/file
bucket=your-bucket
resource="/${bucket}/${file}"
contentType="application/x-compressed-tar"
# RFC 1123-style date, required by the S3 `Date` header and the signature.
dateValue="$(date +'%a, %d %b %Y %H:%M:%S %z')"
# AWS signature v2 StringToSign layout:
#   HTTP-Verb \n Content-MD5 \n Content-Type \n Date \n CanonicalizedResource
# The blank second line is the (empty) Content-MD5 value; without it the
# computed signature is rejected by S3.
# NOTE(review): the blank line seems to have been lost in formatting in the
# original -- confirm against the upstream script.
stringToSign="GET

${contentType}
${dateValue}
${resource}"
pragma solidity ^0.4.10; | |
// Update: Just use HardHat's: https://github.com/nomiclabs/hardhat/blob/master/packages/hardhat-core/console.sol | |
// Enables event logging of the format `console.log('descriptive string', variable)`,
// without having to worry about the variable type (as long as an event has been declared for that type in the
// Console contract).
contract Console { | |
event LogUint(string, uint); |
pragma solidity ^0.4.10; | |
contract RoleBasedAcl { | |
address creator; | |
mapping(address => mapping(string => bool)) roles; | |
function RoleBasedAcl () { | |
creator = msg.sender; | |
} | |
This is a sample script for uploading files from a local PC to Google Drive using Python. In this sample, Quickstart is not used, so when you use this script, please retrieve an access token beforehand.
# Upload a local file to Google Drive via the v3 multipart upload endpoint.
# Requires an OAuth2 access token with a Drive scope; replace the
# "### ... ###" placeholders before running.
# The metadata part is declared as application/json, so it must be valid
# JSON: double-quoted keys and string values (the original used unquoted
# keys and single quotes, which strict JSON parsing rejects).
curl -X POST \
  -H "Authorization: Bearer ### access token ###" \
  -F "metadata={\"name\": \"sample.png\", \"parents\": [\"### folder ID ###\"]};type=application/json;charset=UTF-8" \
  -F "file=@sample.png;type=image/png" \
  "https://www.googleapis.com/upload/drive/v3/files?uploadType=multipart"
import numpy as np | |
from scipy import signal | |
def gaussian_kernel(n, std, normalised=False): | |
'''
Generates an n x n matrix with a centered Gaussian
of standard deviation std. If normalised,
its volume equals 1.'''
gaussian1D = signal.gaussian(n, std) | |
gaussian2D = np.outer(gaussian1D, gaussian1D) |
The problem with large language models is that you can’t run them locally on your laptop. Thanks to Georgi Gerganov and his llama.cpp project, it is now possible to run Meta’s LLaMA on a single computer without a dedicated GPU.
There are multiple steps involved in running LLaMA locally on an M1 Mac after downloading the model weights.
""" | |
This Python script provides a utility to compute the cosine similarity between two text sentences using the TF-IDF | |
(Term Frequency-Inverse Document Frequency) vectorization approach. | |
Key Components: | |
1. Import Statements: The script begins by importing necessary modules: | |
- TfidfVectorizer from sklearn.feature_extraction.text for converting text data into a matrix of TF-IDF features. | |
- cosine_similarity from sklearn.metrics.pairwise to compute the similarity between two vectors in the TF-IDF space. | |
- sys for accessing command-line arguments.