Commit bd2a68ed authored by Erkan Karabulut

create occupancy sensor and a coaty agent

parent 0be47917
File moved
File moved
@@ -62,8 +62,14 @@ def detect(image: Image) -> None:
# Run object detection estimation using the model.
detections = detector.detect(image)
print(detections)
person_count = 0
for detection in detections:
for category in detection.categories:
if category.label == "person":
person_count = person_count + 1
print(category.label)
print(person_count)
# Calculate the FPS
if counter % fps_avg_frame_count == 0:
end_time = time.time()
@@ -73,7 +79,7 @@ def detect(image: Image) -> None:
# Show the FPS
fps_text = 'FPS = {:.1f}'.format(fps)
text_location = (left_margin, row_size)
print(fps_text, text_location, cv2.FONT_HERSHEY_PLAIN,
cv2.putText(fps_text, text_location, cv2.FONT_HERSHEY_PLAIN,
font_size, text_color, font_thickness)
File moved
/**
* Copyright (C) 2020 fortiss GmbH
* @author Nisrine Bnouhanna – {@link "bnouhanna@fortiss.org"}
* @version 1.0
 * Create the Coaty container for the IoT Thing, based on the SensorThingsController
 * Uses Coaty (Siemens AG), licensed under the MIT License
*/
import {Components, Configuration, Container} from "@coaty/core";
import {NodeUtils} from "@coaty/core/runtime-node";
import {SensorThingsController} from "../controller/sensor-things-controller";
import {agentInfo} from "./../agent.info";
// read and validate the broker URL from the environment
NodeUtils.logInfo(`BROKER_URL=${process.env.BROKER_URL}`);
if (!process.env.BROKER_URL) {
NodeUtils.logError(new Error("Missing Broker URL"), "Environment variable BROKER_URL not specified.");
process.exit(1);
}
NodeUtils.handleProcessTermination();
// coaty agent configuration
const configuration: Configuration = {
common: {
agentInfo,
        agentIdentity: {name: "Occupancy Sensor"},
},
communication: {
brokerUrl: process.env.BROKER_URL,
mqttClientOptions : {
username: process.env.BROKER_USERNAME,
password: process.env.BROKER_PASSWORD,
},
namespace: "coaty.demonstrator2",
shouldAutoStart: true,
},
controllers: {
SensorThingsController: {
monitoringInterval: 5000, // send metrics every 5 seconds
},
},
};
const components: Components = {
controllers: {
SensorThingsController,
},
};
// Create the Coaty container with the specified components and autostart
// the communication manager.
const container = Container.resolve(components, configuration);
// Log broker connection state changes (online/offline) to the console.
NodeUtils.logCommunicationState(container);
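Note: a minimal sketch (not part of this commit) of how a second Coaty agent on the same broker and namespace could discover the Thing advertised by this agent. It assumes Coaty 2's DiscoverEvent.withObjectTypes factory and the communicationManager getter on Container; the function name discoverThing is illustrative only.
import {Container, DiscoverEvent} from "@coaty/core";
import {NodeUtils} from "@coaty/core/runtime-node";
import {SensorThingsTypes, Thing} from "@coaty/core/sensor-things";
// Sketch: issue a Discover event for Thing objects; the SensorThingsController
// below resolves it with the "Environment monitoring Thing".
function discoverThing(otherContainer: Container) {
    otherContainer.communicationManager
        .publishDiscover(DiscoverEvent.withObjectTypes([SensorThingsTypes.OBJECT_TYPE_THING]))
        .subscribe(resolve => {
            const thing = resolve.data.object as Thing;
            NodeUtils.logInfo(`Discovered Thing ${thing.name} (${thing.objectId})`);
        });
}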
/**
* Copyright (C) 2020 fortiss GmbH
* @author Nisrine Bnouhanna – bnouhanna@fortiss.org
* @version 1.0
 * Specify the controller for the IoT Thing Coaty agent, based on the SensorSourceController
 * Uses Coaty (Siemens AG), licensed under the MIT License
*/
import {AdvertiseEvent, CoatyObject, ResolveEvent, TimeInterval, Uuid} from "@coaty/core";
import {NodeUtils} from "@coaty/core/runtime-node";
import {
EncodingTypes,
FeatureOfInterest,
MockSensorIo,
Observation,
Sensor,
SensorContainer,
SensorSourceController,
SensorThingsTypes,
Thing,
} from "@coaty/core/sensor-things";
import * as sensor_config from "../sensor";
/**
* Handles registration of sensor objects.
*/
export class SensorThingsController extends SensorSourceController {
private _thing: Thing;
private _sensorsArray: Sensor[] = [];
private _featureOfInterest: FeatureOfInterest;
private _occupancySensor: sensor_config.Occupancy;
private _dataStore = new Map<Uuid, CoatyObject>();
/**
* Initialize the sensors right after coaty starts to run
*/
onCommunicationManagerStarting() {
super.onCommunicationManagerStarting();
this._occupancySensor = new sensor_config.Occupancy();
this._createObjects();
this._observeDiscover();
NodeUtils.logInfo(`${this._thing.name} ID: ${this._thing.objectId}`);
}
/**
* Create an observation
* @param container
* @param value
* @param resultQuality
* @param validTime
* @param parameters
* @protected
*/
protected createObservation(
container: SensorContainer,
value: any,
resultQuality?: string[],
validTime?: TimeInterval,
parameters?: { [key: string]: any; }): Observation {
return super.createObservation(
container,
this._observation(container.sensor.objectId),
resultQuality,
validTime,
parameters,
this._featureOfInterest.objectId);
}
/**
* Initialize the IoT thing, sensors and featureOfInterest objects and advertise them
* @private
*/
private _createObjects() {
this._sensorsArray = [];
// describe the IoT thing
this._thing = {
name: "Environment monitoring Thing",
objectId: "e0635d91-e70e-4aeb-83ee-d6bd4e570ab4",
objectType: SensorThingsTypes.OBJECT_TYPE_THING,
parentObjectId: this.container.identity.objectId,
coreType: "CoatyObject",
description: "An IoT device with multiple sensors",
// locationId: this._featureOfInterest.objectId,
};
// advertise the IoT thing
this._dataStore.set(this._thing.objectId, this._thing);
this.communicationManager.publishAdvertise(AdvertiseEvent.withObject(this._thing));
// relate sensors to the IoT thing
this._registerSensor(this._occupancySensor._getSensorConfig(this._thing.objectId));
        // initialize a featureOfInterest (location) object for the thing
this._featureOfInterest = {
name: "Location",
objectId: "1f554821-5664-44ea-bbc2-edf28c7d810d",
objectType: SensorThingsTypes.OBJECT_TYPE_FEATURE_OF_INTEREST,
coreType: "CoatyObject",
description: "The location of the Thing",
encodingType: EncodingTypes.UNDEFINED,
parentObjectId: this._thing.objectId,
metadata: {
building: {
name: "HT",
address: "Mies-van-der-Rohe-Straße 6, 80807 München"
},
floor: "15",
room: "1",
},
};
// advertise the featureOfInterest
this._dataStore.set(this._featureOfInterest.objectId, this._featureOfInterest);
this.communicationManager.publishAdvertise(AdvertiseEvent.withObject(this._featureOfInterest));
}
/**
* register a given sensor in coaty
* @param sensor
* @private
*/
private _registerSensor(sensor: Sensor) {
this._dataStore.set(sensor.objectId, sensor);
this._sensorsArray.push(sensor);
this.registerSensor(sensor, new MockSensorIo(), "channel", this.options.monitoringInterval);
}
/**
     * Observe Discover events for IoT Things, Sensors, and FeatureOfInterest objects and resolve them
* @private
*/
private _observeDiscover() {
this.communicationManager.observeDiscover()
.subscribe(event => {
if (event.data.isDiscoveringObjectId) {
if (this._dataStore.has(event.data.objectId)) {
event.resolve(ResolveEvent.withObject(this._dataStore.get(event.data.objectId)));
}
} else if (event.data.isObjectTypeCompatible(SensorThingsTypes.OBJECT_TYPE_THING)) {
event.resolve(ResolveEvent.withObject(this._thing));
} else if (event.data.isObjectTypeCompatible(SensorThingsTypes.OBJECT_TYPE_FEATURE_OF_INTEREST)) {
event.resolve(ResolveEvent.withObject(this._featureOfInterest));
}
});
}
/**
* Read observations from sensors
* @param sensorId
* @private
*/
private _observation(sensorId: string): number {
const metricIndex = this._sensorsArray.findIndex(s => s.objectId === sensorId);
switch (metricIndex) {
case 0:
return this._occupancySensor._getValue();
default:
                throw new TypeError("There is only 1 registered metric (occupancy). Got index " + metricIndex);
}
}
}
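Note: because _registerSensor above uses the "channel" observation publication type, observations are pushed via Channel events rather than Advertise events. The following is a minimal consumer-side sketch, assuming (not confirmed by this commit) that SensorSourceController uses the sensor's objectId as the channel id; observeOccupancy and consumerContainer are illustrative names.
import {Container} from "@coaty/core";
import {NodeUtils} from "@coaty/core/runtime-node";
import {Observation} from "@coaty/core/sensor-things";
// Sensor objectId taken from the Occupancy sensor description in this commit.
const OCCUPANCY_SENSOR_ID = "17f98945-4a78-41b0-84f0-bb9a4a807b13";
// Sketch: subscribe to channelled observations from another agent in the same namespace.
function observeOccupancy(consumerContainer: Container) {
    consumerContainer.communicationManager
        .observeChannel(OCCUPANCY_SENSOR_ID)    // assumption: channel id equals the sensor objectId
        .subscribe(event => {
            const observation = event.data.object as Observation;
            NodeUtils.logInfo(`Occupancy observation: ${observation.result}`);
        });
}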
"""
Copyright (C) 2021 fortiss GmbH
@author Erkan Karabulut – karabulut@fortiss.org
@version 1.0
Detect humans in camera frames received over ROS and print the person count per frame
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import argparse
import io
import sys
import time

import cv2
import numpy as np
import rospy
from matplotlib import cm
from PIL import Image
from sensor_msgs.msg import CompressedImage
from tflite_runtime.interpreter import Interpreter

from object_detector import ObjectDetector
from object_detector import ObjectDetectorOptions
import utils
model = None
def detect(image: Image) -> None:
  """
  Run person detection on a single image received from the camera topic.
  """
# Variables to calculate FPS
counter, fps = 0, 0
start_time = time.time()
# Visualization parameters
row_size = 20 # pixels
left_margin = 24 # pixels
text_color = (0, 0, 255) # red
font_size = 1
font_thickness = 1
fps_avg_frame_count = 10
  # Initialize the object detection model (re-created on every frame; could be cached for speed)
options = ObjectDetectorOptions(
num_threads=1,
score_threshold=0.3,
max_results=3,
enable_edgetpu=False)
detector = ObjectDetector(model_path=model, options=options)
counter += 1
image = np.array(image)
# Run object detection estimation using the model.
detections = detector.detect(image)
person_count = 0
for detection in detections:
for category in detection.categories:
if category.label == "person":
person_count = person_count + 1
  print(person_count, flush=True)  # flush so the Node.js parent reading stdout receives each count immediately
# Calculate the FPS
# if counter % fps_avg_frame_count == 0:
# end_time = time.time()
# fps = fps_avg_frame_count / (end_time - start_time)
# start_time = time.time()
# Show the FPS
# fps_text = 'FPS = {:.1f}'.format(fps)
# text_location = (left_margin, row_size)
# cv2.putText(fps_text, text_location, cv2.FONT_HERSHEY_PLAIN,
# font_size, text_color, font_thickness)
def image_received(raw_compressed_image):
image = Image.open(io.BytesIO(bytearray(raw_compressed_image.data))). \
convert('RGB')
# image.save("image" + str(raw_compressed_image.header.stamp) + ".jpeg")
detect(image=image)
if __name__ == '__main__':
rospy.init_node('human_detection', anonymous=True)
parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument(
'--model',
help='Path of the object detection model.',
required=False,
default='efficientdet_lite0.tflite')
parser.add_argument(
'--cameraId', help='Id of camera.', required=False, type=int, default=0)
parser.add_argument(
'--frameWidth',
help='Width of frame to capture from camera.',
required=False,
type=int,
default=640)
parser.add_argument(
'--frameHeight',
help='Height of frame to capture from camera.',
required=False,
type=int,
default=480)
parser.add_argument(
'--numThreads',
help='Number of CPU threads to run the model.',
required=False,
type=int,
default=4)
parser.add_argument(
'--enableEdgeTPU',
help='Whether to run the model on EdgeTPU.',
action='store_true',
required=False,
default=False)
args = parser.parse_args()
  model = args.model  # only the model path is used; camera frames arrive via the ROS topic below
sub = rospy.Subscriber('/raspicam_node/image/compressed', CompressedImage, image_received)
rospy.spin()
/**
* Copyright (C) 2021 fortiss GmbH
* @author Erkan Karabulut – {@link "karabulut@fortiss.org"}
* @version 1.0
 * Export sensor descriptions
*/
export {Occupancy} from "./occupancy";
/**
* Copyright (C) 2021 fortiss GmbH
* @author Erkan Karabulut – {@link "karabulut@fortiss.org"}
* @version 1.0
 * Describes a camera-based occupancy sensor and reads data from it
*/
import {ObservationTypes, Sensor, SensorEncodingTypes, SensorThingsTypes} from "@coaty/core/sensor-things";
import {spawn} from "child_process";
/**
 * Describes the camera-based occupancy sensor and runs the Python detection driver to read observations
*/
export class Occupancy {
_value: number = 0;
constructor() {
this._listenSensorData();
}
/**
     * describe the occupancy sensor
* @param parentObjectId: IoT thing object ID
*/
public _getSensorConfig(parentObjectId: string) {
        const occupancySensor: Sensor = {
name: "Camera sensor",
objectId: "17f98945-4a78-41b0-84f0-bb9a4a807b13",
objectType: SensorThingsTypes.OBJECT_TYPE_SENSOR,
coreType: "CoatyObject",
description: `Camera sensor that is used to detect people in a video stream`,
unitOfMeasurement: {
name: "Occupancy",
symbol: "?",
definition: "A link to the definition",
},
observationType: ObservationTypes.MEASUREMENT,
observedProperty: {
name: "Headcount",
description: "The number of people",
definition: "A link to the definition",
},
parentObjectId: parentObjectId,
encodingType: SensorEncodingTypes.UNDEFINED,
metadata: {},
};
        return occupancySensor;
}
public _getValue() {
return this._value;
}
    /**
     * Spawn the Python detection driver and keep the latest observation value
     * @private
     */
    private _listenSensorData() {
        const occupancyDriver = spawn("python3", [__dirname + "/../../driver/occupancy.py", "0"]);
        occupancyDriver.stdout.on("data", (data) => {
            this._value = parseFloat(Buffer.from(data).toString());
});
}
}
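Note: the stdout "data" handler above receives arbitrary chunks, so one chunk may carry several counts or a partial line, and parseFloat then yields only the first number or NaN. A minimal sketch of a line-buffered variant follows (listenLineBuffered and onValue are illustrative names, not part of this commit):
import {spawn} from "child_process";
// Sketch: accumulate stdout into complete lines before parsing person counts.
function listenLineBuffered(onValue: (value: number) => void) {
    const driver = spawn("python3", [__dirname + "/../../driver/occupancy.py", "0"]);
    let pending = "";
    driver.stdout.on("data", (chunk) => {
        pending += chunk.toString();
        const lines = pending.split("\n");
        pending = lines.pop() ?? "";    // keep any trailing partial line for the next chunk
        for (const line of lines) {
            const value = parseFloat(line);
            if (!Number.isNaN(value)) {
                onValue(value);         // e.g. assign to this._value in the Occupancy class
            }
        }
    });
}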