Skip to content

Instantly share code, notes, and snippets.

@vinodsr
Created October 10, 2020 07:47
Show Gist options
  • Star 1 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save vinodsr/edabaf37ca8ab77f68688844ff48124e to your computer and use it in GitHub Desktop.
Save vinodsr/edabaf37ca8ab77f68688844ff48124e to your computer and use it in GitHub Desktop.
# Port the Frigate web server listens on.
# NOTE(review): original key was 'WEB_POrt' — Frigate expects lowercase 'web_port';
# the miscased key would be silently ignored.
web_port: 5000
# Debug flag (0 = off, 1 = on) -- TODO confirm accepted values against the Frigate version in use.
debug: 1
# MQTT broker connection used by Frigate to publish events and snapshots.
mqtt:
  host: 192.168.1.100
  topic_prefix: frigate
  # client_id: frigate # Optional -- set to override default client id of 'frigate' if running multiple instances
  # user: username # Optional
  #################
  ## Environment variables that begin with 'FRIGATE_' may be referenced in {}.
  ## password: '{FRIGATE_MQTT_PASSWORD}'
  #################
  # password: password # Optional

#################
# Default ffmpeg args. Optional and can be overwritten per camera.
# Should work with most RTSP cameras that send h264 video
# Built from the properties below with:
# "ffmpeg" + global_args + input_args + "-i" + input + output_args
#################
# ffmpeg:
#   global_args:
#     - -hide_banner
#     - -loglevel
#     - panic
#   hwaccel_args: []
#   input_args:
#     - -avoid_negative_ts
#     - make_zero
#     - -fflags
#     - nobuffer
#     - -flags
#     - low_delay
#     - -strict
#     - experimental
#     - -fflags
#     - +genpts+discardcorrupt
#     - -vsync
#     - drop
#     - -rtsp_transport
#     - tcp
#     - -stimeout
#     - '5000000'
#     - -use_wallclock_as_timestamps
#     - '1'
#   output_args:
#     - -f
#     - rawvideo
#     - -pix_fmt
#     - rgb24
####################
# Global object configuration. Applies to all cameras
# unless overridden at the camera level.
# Keys must be valid labels. By default, the model uses coco (https://dl.google.com/coral/canned_models/coco_labels.txt).
# All labels from the model are reported over MQTT. These values are used to filter out false positives.
# min_area (optional): minimum width*height of the bounding box for the detected person
# max_area (optional): maximum width*height of the bounding box for the detected person
# threshold (optional): The minimum decimal percentage (50% hit = 0.5) for the confidence from tensorflow
####################
objects:
  track:
    - person
    # - car
    # - truck
  filters:
    person:
      min_area: 5000
      max_area: 100000
      min_score: 0.3
      threshold: 0.8
# Renamed from 'zones' (suffix '_old') — presumably kept for reference but
# inactive; Frigate will not read this key. TODO confirm this is intentional.
zones_old:
  #################
  # Name of the zone
  ################
  front_steps:
    front:
      ####################
      # For each camera, a list of x,y coordinates to define the polygon of the zone. The top
      # left corner is 0,0. Can also be a comma separated string of all x,y coordinates combined.
      # The same zone can exist across multiple cameras if they have overlapping FOVs.
      # An object is determined to be in the zone based on whether or not the bottom center
      # of its bounding box is within the polygon. The polygon must have at least 3 points.
      # Coordinates can be generated at https://www.image-map.net/
      ####################
      coordinates:
        - 0,0
        - 747,939
        - 788,805
      ################
      # Zone level object filters. These are applied in addition to the global and camera filters
      # and should be more restrictive than the global and camera filters. The global and camera
      # filters are applied upstream.
      ################
      filters:
        person:
          min_area: 5000
          max_area: 100000
          threshold: 0.8
  driveway2:
    front:
      # Comma-separated x,y list form of the polygon (alternative to the list form above).
      coordinates: 0,0,800,200,900,200
# Per-camera configuration. Each camera merges with / overrides the global
# config above.
cameras:
  front:
    ffmpeg:
      ################
      # Source passed to ffmpeg after the -i parameter. Supports anything compatible with OpenCV and FFmpeg.
      # Environment variables that begin with 'FRIGATE_' may be referenced in {}
      ################
      input: rtsp://<username>:<password>@<cctv device ip>:554/Streaming/channels/102
      #################
      # These values will override default values for just this camera
      #################
      # global_args: []
      # hwaccel_args: []
      # input_args: []
      # output_args: []
    ################
    ## Optionally specify the resolution of the video feed. Frigate will try to auto detect if not specified
    ################
    # height: 1280
    # width: 720
    ################
    ## Optional mask. Must be the same aspect ratio as your video feed. Value is either the
    ## name of a file in the config directory or a base64 encoded bmp image prefixed with
    ## 'base64,' eg. 'base64,asfasdfasdf....'.
    ##
    ## The mask works by looking at the bottom center of the bounding box for the detected
    ## person in the image. If that pixel in the mask is a black pixel, it ignores it as a
    ## false positive. In my mask, the grass and driveway visible from my backdoor camera
    ## are white. The garage doors, sky, and trees (anywhere it would be impossible for a
    ## person to stand) are black.
    ##
    ## Masked areas are also ignored for motion detection.
    ################
    # mask: back-mask.bmp
    ################
    # Allows you to limit the framerate within frigate for cameras that do not support
    # custom framerates. A value of 1 tells frigate to look at every frame, 2 every 2nd frame,
    # 3 every 3rd frame, etc.
    ################
    take_frame: 15
    ################
    # This will save a clip for each tracked object by frigate along with a json file that contains
    # data related to the tracked object. This works by telling ffmpeg to write video segments to /cache
    # from the video stream without re-encoding. Clips are then created by using ffmpeg to merge segments
    # without re-encoding. The segments saved are unaltered from what frigate receives to avoid re-encoding.
    # They do not contain bounding boxes. 30 seconds of video is added to the start of the clip. These are
    # optimized to capture "false_positive" examples for improving frigate.
    #
    # NOTE: This will only work for camera feeds that can be copied into the mp4 container format without
    # encoding such as h264. I do not expect this to work for mjpeg streams, and it may not work for many other
    # types of streams.
    #
    # WARNING: Videos in /cache are retained until there are no ongoing events. If you are tracking cars or
    # other objects for long periods of time, the cache will continue to grow indefinitely.
    ################
    save_clips:
      enabled: true
      #########
      # Number of seconds before the event to include in the clips
      #########
      pre_capture: 30
    ################
    # Configuration for the snapshots in the debug view and mqtt
    ################
    snapshots:
      show_timestamp: true
      draw_zones: true
    ################
    # Camera level object config. This config is merged with the global config above.
    ################
    objects:
      track:
        - person
      filters:
        person:
          min_area: 5000
          max_area: 100000
          min_score: 0.3
          threshold: 0.8
  back:
    ffmpeg:
      ################
      # Source passed to ffmpeg after the -i parameter. Supports anything compatible with OpenCV and FFmpeg.
      # Environment variables that begin with 'FRIGATE_' may be referenced in {}
      ################
      # SECURITY NOTE(review): plaintext credentials committed here — rotate this
      # password and reference it via a FRIGATE_ environment variable instead,
      # e.g. rtsp://admin:{FRIGATE_RTSP_PASSWORD}@192.168.1.18:554/...
      input: rtsp://admin:Password12_@192.168.1.18:554/Streaming/channels/402
      #################
      # These values will override default values for just this camera
      #################
      # global_args: []
      # hwaccel_args: []
      # input_args: []
      # output_args: []
    ################
    ## Optionally specify the resolution of the video feed. Frigate will try to auto detect if not specified
    ################
    # height: 1280
    # width: 720
    ################
    ## Optional mask. Must be the same aspect ratio as your video feed. Value is either the
    ## name of a file in the config directory or a base64 encoded bmp image prefixed with
    ## 'base64,' eg. 'base64,asfasdfasdf....'.
    ##
    ## The mask works by looking at the bottom center of the bounding box for the detected
    ## person in the image. If that pixel in the mask is a black pixel, it ignores it as a
    ## false positive. In my mask, the grass and driveway visible from my backdoor camera
    ## are white. The garage doors, sky, and trees (anywhere it would be impossible for a
    ## person to stand) are black.
    ##
    ## Masked areas are also ignored for motion detection.
    ################
    # mask: back-mask.bmp
    ################
    # Allows you to limit the framerate within frigate for cameras that do not support
    # custom framerates. A value of 1 tells frigate to look at every frame, 2 every 2nd frame,
    # 3 every 3rd frame, etc.
    ################
    take_frame: 10
    ################
    # This will save a clip for each tracked object by frigate along with a json file that contains
    # data related to the tracked object. This works by telling ffmpeg to write video segments to /cache
    # from the video stream without re-encoding. Clips are then created by using ffmpeg to merge segments
    # without re-encoding. The segments saved are unaltered from what frigate receives to avoid re-encoding.
    # They do not contain bounding boxes. 30 seconds of video is added to the start of the clip. These are
    # optimized to capture "false_positive" examples for improving frigate.
    #
    # NOTE: This will only work for camera feeds that can be copied into the mp4 container format without
    # encoding such as h264. I do not expect this to work for mjpeg streams, and it may not work for many other
    # types of streams.
    #
    # WARNING: Videos in /cache are retained until there are no ongoing events. If you are tracking cars or
    # other objects for long periods of time, the cache will continue to grow indefinitely.
    ################
    save_clips:
      enabled: true
      #########
      # Number of seconds before the event to include in the clips
      #########
      pre_capture: 30
    ################
    # Configuration for the snapshots in the debug view and mqtt
    ################
    snapshots:
      show_timestamp: true
      draw_zones: true
    ################
    # Camera level object config. This config is merged with the global config above.
    ################
    objects:
      track:
        - person
        - car
        - umbrella
      filters:
        person:
          min_area: 5000
          max_area: 100000
          threshold: 0.8
          min_score: 0.3
[
{
"id": "e1b17e4f.9b4b7",
"type": "tab",
"label": "Flow 4",
"disabled": false,
"info": ""
},
{
"id": "4d6ed5d0.8ad21c",
"type": "mqtt in",
"z": "e1b17e4f.9b4b7",
"name": "",
"topic": "frigate/front/person/event",
"qos": "0",
"datatype": "auto",
"broker": "2c6c3137.46f4ee",
"x": 250,
"y": 220,
"wires": [
[
"8f3c4648.383b28",
"5122c127.8f448"
]
]
},
{
"id": "8f3c4648.383b28",
"type": "debug",
"z": "e1b17e4f.9b4b7",
"name": "Frigate",
"active": true,
"tosidebar": true,
"console": false,
"tostatus": false,
"complete": "true",
"targetType": "full",
"statusVal": "",
"statusType": "auto",
"x": 470,
"y": 600,
"wires": []
},
{
"id": "5122c127.8f448",
"type": "function",
"z": "e1b17e4f.9b4b7",
"name": "Parse JSON",
"func": "\nmsg.payload = JSON.parse(msg.payload);\nreturn msg;",
"outputs": 1,
"noerr": 0,
"initialize": "",
"finalize": "",
"x": 990,
"y": 220,
"wires": [
[
"ab3aee8f.f7437"
]
]
},
{
"id": "bdc966c7.f15408",
"type": "mqtt in",
"z": "e1b17e4f.9b4b7",
"name": "",
"topic": "frigate/#",
"qos": "0",
"datatype": "auto",
"broker": "2c6c3137.46f4ee",
"x": 190,
"y": 420,
"wires": [
[
"8f3c4648.383b28"
]
]
},
{
"id": "ab3aee8f.f7437",
"type": "function",
"z": "e1b17e4f.9b4b7",
"name": "Send picture",
"func": "\nimage = Buffer.from(msg.payload.image, \"base64\");\nlet camera = \"!\";\ntry{\n camera = msg.topic.split(\"/\")[1];\n} catch(e){\n \n}\n\nlet message = `Someone 🧍 is at the ${camera} side`;\nif(msg.payload.status === \"OFF\"){\n message = `Someone 🚶 had left ${camera} side`;\n}\nvar pl = {\n content: image,\n caption: message,\n type : 'photo',\n chatId: <add your telegramID>\n}\n\nmsg.payload = pl;\n\nreturn msg;",
"outputs": 1,
"noerr": 9,
"initialize": "",
"finalize": "",
"x": 1230,
"y": 220,
"wires": [
[
"d723a983.c93188"
]
]
},
{
"id": "d723a983.c93188",
"type": "telegram sender",
"z": "e1b17e4f.9b4b7",
"name": "Send Guard Status",
"bot": "88cf9c06.48b42",
"x": 1510,
"y": 320,
"wires": [
[]
]
},
{
"id": "639d4f5.3b21ab",
"type": "function",
"z": "e1b17e4f.9b4b7",
"name": "Frigate Status",
"func": "let oldmsg = msg;\nmsg = {} \nmsg.payload = {};\nmsg.payload.chatId = []; // add your telegramid\nmsg.payload.type = \"message\";\nlet message = \"Frigate Status : \"+ oldmsg.payload;\nmsg.payload.content = message;\nmsg.payload.options = {\n parse_mode : \"Markdown\"\n}\nreturn msg;",
"outputs": 1,
"noerr": 0,
"initialize": "",
"finalize": "",
"x": 1240,
"y": 340,
"wires": [
[
"d723a983.c93188"
]
]
},
{
"id": "a39696e5.756f98",
"type": "mqtt in",
"z": "e1b17e4f.9b4b7",
"name": "",
"topic": "frigate/available",
"qos": "0",
"datatype": "auto",
"broker": "2c6c3137.46f4ee",
"x": 220,
"y": 340,
"wires": [
[
"639d4f5.3b21ab"
]
]
},
{
"id": "15f05761.a89eb9",
"type": "mqtt in",
"z": "e1b17e4f.9b4b7",
"d": true,
"name": "",
"topic": "frigate/front/person",
"qos": "0",
"datatype": "auto",
"broker": "2c6c3137.46f4ee",
"x": 230,
"y": 280,
"wires": [
[]
]
},
{
"id": "5c28bc9a.5bc114",
"type": "mqtt in",
"z": "e1b17e4f.9b4b7",
"name": "",
"topic": "frigate/back/person/event",
"qos": "0",
"datatype": "auto",
"broker": "2c6c3137.46f4ee",
"x": 250,
"y": 160,
"wires": [
[
"8f3c4648.383b28",
"5122c127.8f448"
]
]
},
{
"id": "2c6c3137.46f4ee",
"type": "mqtt-broker",
"z": "",
"name": "",
"broker": "localhost",
"port": "1883",
"clientid": "",
"usetls": false,
"compatmode": false,
"keepalive": "60",
"cleansession": true,
"birthTopic": "",
"birthQos": "0",
"birthPayload": "",
"closeTopic": "",
"closeQos": "0",
"closePayload": "",
"willTopic": "",
"willQos": "0",
"willPayload": ""
},
{
"id": "88cf9c06.48b42",
"type": "telegram bot",
"z": "",
"botname": "BotName",
"usernames": "",
"chatids": "",
"baseapiurl": "",
"updatemode": "polling",
"pollinterval": "300",
"usesocks": false,
"sockshost": "",
"socksport": "6667",
"socksusername": "anonymous",
"sockspassword": "",
"bothost": "",
"localbotport": "8443",
"publicbotport": "8443",
"privatekey": "",
"certificate": "",
"useselfsignedcertificate": false,
"sslterminated": false,
"verboselogging": true
}
]
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment