Maria Patterson mtpatter

def main():
    # Insert standard model card construction here.
    # Append a second Dataset entry (index 1) describing the test set.
    model_card.model_parameters.data.append(mctlib.Dataset())
    model_card.model_parameters.data[1].graphics.description = (
        f'{len(X_test)} rows with {len(X_test.columns)} features')
    model_card.model_parameters.data[1].graphics.collection = [
        mctlib.Graphic(image=mean_radius_test),
        mctlib.Graphic(image=mean_texture_test),
    ]
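The variables mean_radius_test and mean_texture_test are not defined in this excerpt. Since mctlib.Graphic(image=...) takes a base64-encoded PNG string, a helper along these lines could produce them; the helper name and plotting choices below are assumptions, not part of the original gist.

import base64
import io

import matplotlib.pyplot as plt
import seaborn as sns


def plot_to_str(data, column):
    """Plot a histogram of one feature and return it as a base64 PNG string."""
    fig, ax = plt.subplots()
    sns.histplot(data[column], ax=ax)
    buf = io.BytesIO()
    fig.savefig(buf, format='png')
    plt.close(fig)
    return base64.b64encode(buf.getvalue()).decode('utf-8')


# Hypothetical usage matching the snippet above.
mean_radius_test = plot_to_str(X_test, 'mean radius')
mean_texture_test = plot_to_str(X_test, 'mean texture')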
<div class="row">
  {% if thingstoknow and (thingstoknow.intervention) %}
    <div class="col card">
      <h2>Things to Know</h2>
      {% if thingstoknow.intervention %}
        <h3>Intervention</h3>
        {{ render_considerations(thingstoknow.intervention) }}
      {% endif %}
# CUSTOM CLASSES
import dataclasses
from typing import List, Optional


@dataclasses.dataclass
class Intervention(BaseModelCardField):
    """Assistance or Assessment

    Attributes:
      description: How the application affects humans.
    """
    description: Optional[str] = None
    # The compiled proto module name (model_card_pb2) is assumed here;
    # adjust it to wherever the custom proto below is generated.
    _proto_type: dataclasses.InitVar[type(
        model_card_pb2.Intervention)] = model_card_pb2.Intervention
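Following the same pattern, the repeated field would live in a container dataclass that mirrors the Thingstoknow proto message shown further down. A sketch, again assuming the compiled proto module is named model_card_pb2:

@dataclasses.dataclass
class Thingstoknow(BaseModelCardField):
    """Things you should know.

    Attributes:
      intervention: Assistance or assessment interventions.
    """
    intervention: List[Intervention] = dataclasses.field(default_factory=list)
    _proto_type: dataclasses.InitVar[type(
        model_card_pb2.Thingstoknow)] = model_card_pb2.Thingstoknow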
# Generate Model Card.
jinja_env = jinja2.Environment(
    loader=self._jinja_loader(template_dir),
    autoescape=True,
    auto_reload=True,
    cache_size=0)

template = jinja_env.get_template(template_file)
model_card_file_content = template.render(
    model_details=model_card.model_details,
    model_parameters=model_card.model_parameters,
    # Pass the custom section so the template's thingstoknow block renders;
    # the remaining standard sections would be passed here as well.
    thingstoknow=model_card.thingstoknow)
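The render call returns a string, which then gets written to the card's output location. A minimal sketch; the output_dir variable is an assumption:

import os

model_card_file_path = os.path.join(output_dir, 'model_card.html')
with open(model_card_file_path, 'w') as f:
    f.write(model_card_file_content)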
// CUSTOM FIELDS

// How the model affects humans.
message Intervention {
  // Assistance or Assessment
  optional string description = 1;
}

message Thingstoknow {
  // Things you should know
  repeated Intervention intervention = 1;
}
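Once the proto bindings are regenerated, the custom section can be populated on the model card before rendering. A minimal sketch, assuming the ModelCard class has been extended with a thingstoknow field; the description text is a placeholder:

model_card.thingstoknow = Thingstoknow()
model_card.thingstoknow.intervention.append(
    Intervention(description='Describe how the application affects humans.'))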
def main():
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('topic', type=str,
                        help='Name of the Kafka topic to stream.')
    args = parser.parse_args()

    conf = {'bootstrap.servers': 'localhost:9092',
            'default.topic.config': {'auto.offset.reset': 'smallest'},
            'group.id': socket.gethostname()}
# Process a Kafka message
def msg_process(msg):
    # Print the current time and the message.
    time_start = time.strftime("%Y-%m-%d %H:%M:%S")
    val = msg.value()
    dval = json.loads(val)
    print(time_start, dval)
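The excerpt stops before the consumer itself is created. A minimal consume loop that would sit inside main(), tying the conf dictionary to msg_process, sketched with confluent-kafka; the polling timeout and error handling are assumptions:

from confluent_kafka import Consumer, KafkaError, KafkaException

consumer = Consumer(conf)
consumer.subscribe([args.topic])
try:
    while True:
        msg = consumer.poll(timeout=1.0)
        if msg is None:
            continue
        if msg.error():
            # Reaching the end of a partition is informational, not fatal.
            if msg.error().code() == KafkaError._PARTITION_EOF:
                continue
            raise KafkaException(msg.error())
        msg_process(msg)
finally:
    # Close the consumer to commit final offsets and leave the group cleanly.
    consumer.close()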
def main():
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('filename', type=str,
                        help='Time series csv file.')
    parser.add_argument('topic', type=str,
                        help='Name of the Kafka topic to stream.')
    parser.add_argument('--speed', type=float, default=1, required=False,
                        help='Speed up time series by a given multiplicative factor.')
    args = parser.parse_args()
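The parsed arguments feed a producer loop that replays the CSV rows onto the topic. A rough sketch of how the --speed factor could compress the gaps between rows; it assumes a confluent-kafka Producer and a 'timestamp' column, both placeholders rather than the original gist's code:

import time

import pandas as pd
from confluent_kafka import Producer

producer = Producer({'bootstrap.servers': 'localhost:9092'})
df = pd.read_csv(args.filename)
previous = None
for _, row in df.iterrows():
    # Sleep for the gap between consecutive timestamps, divided by --speed.
    if previous is not None:
        time.sleep((row['timestamp'] - previous) / args.speed)
    previous = row['timestamp']
    producer.produce(args.topic, value=row.to_json())
    producer.poll(0)
producer.flush()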
def wait_model_ready(model_name, model_version):
    """Poll the MLflow model registry until the new version is READY."""
    client = MlflowClient()
    for _ in range(10):
        model_version_details = client.get_model_version(name=model_name,
                                                         version=model_version)
        status = ModelVersionStatus.from_string(model_version_details.status)
        print("Model status: %s" % ModelVersionStatus.to_string(status))
        if status == ModelVersionStatus.READY:
            return True
        # Wait before polling again.
        time.sleep(1)
    return False
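A hedged usage example: after registering a model, the helper above can gate a stage transition. The model name, version, and stage below are placeholders:

if wait_model_ready("cancer-clf", 1):
    client = MlflowClient()
    # Promote the registered version once MLflow has finished creating it.
    client.transition_model_version_stage(
        name="cancer-clf", version=1, stage="Staging")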
mtpatter / mlflow-train-snippet.py
Created July 11, 2021 18:52
Code snippet for sklearn and mlflow
model_path = "clf-model"
# Load a standard machine learning dataset
cancer = load_breast_cancer()
df = pd.DataFrame(cancer['data'], columns=cancer['feature_names'])
df['target'] = cancer['target']
# Optionally write out a subset of the data, used in this tutorial for inference with the API
if args.outputTestData:
    # Assumed completion: save a small feature-only sample for later inference
    # calls against the API; the sample size and filename are placeholders.
    df.drop('target', axis=1).sample(n=10).to_csv('test.csv', index=False)
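The snippet ends here; the rest of a typical sklearn-plus-MLflow training flow might continue along these lines. The classifier choice, parameters, and logged metric are placeholders, not the original gist's code:

import mlflow
import mlflow.sklearn
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import train_test_split

X = df.drop('target', axis=1)
y = df['target']
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=42)

with mlflow.start_run():
    clf = RandomForestClassifier(n_estimators=100)
    clf.fit(X_train, y_train)
    mlflow.log_metric('test_accuracy', clf.score(X_test, y_test))
    # Log the fitted model under the artifact path defined above.
    mlflow.sklearn.log_model(clf, artifact_path=model_path)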