Compare commits

..

2 commits

Author SHA1 Message Date
67a80d5d1a finished GRPC server 2022-01-30 22:09:56 +01:00
4816ec3352 add grpc models
update application interface
switch to pylint
2022-01-28 23:44:08 +01:00
33 changed files with 2644 additions and 297 deletions

View file

@ -1,15 +1,16 @@
repos: repos:
- repo: https://github.com/PyCQA/flake8
rev: 4.0.1
hooks:
- id: flake8
args:
- --count
- --max-complexity=10
- --max-line-length=100
- --show-source
- --statistics
- repo: https://github.com/pre-commit/mirrors-yapf - repo: https://github.com/pre-commit/mirrors-yapf
rev: 'v0.32.0' rev: 'v0.32.0'
hooks: hooks:
- id: yapf - id: yapf
additional_dependencies: [ toml ]
exclude: 'tsgrain_controller/grpc_generated'
- repo: local
hooks:
- id: tox
name: tox
entry: tox
language: system
files: \.py$
pass_filenames: false

12
.pylintrc Normal file
View file

@ -0,0 +1,12 @@
[MASTER]
ignore=grpc_generated
extension-pkg-allow-list=smbus
disable=missing-module-docstring,
missing-class-docstring,
missing-function-docstring,
too-few-public-methods,
invalid-name,
too-many-instance-attributes,
fixme

109
proto/tsgrain.proto Normal file
View file

@ -0,0 +1,109 @@
syntax = "proto3";
import "google/protobuf/empty.proto";
import "google/protobuf/wrappers.proto";
service TSGRain {
// Starte eine neue Bewässerungsaufgabe (oder stoppe eine laufende, wenn
// diese bereits läuft).
rpc StartTask(TaskRequest) returns (TaskRequestResult) {}
// Gibt sämtliche in der Warteschlange befindlichen Bewässerungsaufgaben zurück.
rpc GetTasks(google.protobuf.Empty) returns (TaskList) {}
// Streamt die aktuelle Warteschlange mit ihren Bewässerungsaufgaben,
// d.h. bei Änderung wird die neue Version übertragen.
rpc StreamTasks(google.protobuf.Empty) returns (stream TaskList) {}
// Erstelle einen neuen Bewässerungsjob.
rpc CreateJob(Job) returns (JobID) {}
// Gibt den Bewässerungsjob mit der gegebenen ID zurück.
rpc GetJob(JobID) returns (Job) {}
// Gibt alle gespeicherten Bewässerungsjobs zurück.
rpc GetJobs(google.protobuf.Empty) returns (JobList) {}
// Aktualisiert einen Bewässerungsjob.
rpc UpdateJob(Job) returns (google.protobuf.Empty) {}
// Lösche den Bewässerungsjob mit der gegebenen ID.
rpc DeleteJob(JobID) returns (google.protobuf.Empty) {}
// Gibt zurück, ob der Automatikmodus aktiviert ist.
rpc GetAutoMode(google.protobuf.Empty) returns (google.protobuf.BoolValue) {}
// Aktiviert/deaktiviert den Automatikmodus.
rpc SetAutoMode(google.protobuf.BoolValue) returns (google.protobuf.Empty) {}
// Datum/Uhrzeit/Zeitzone abrufen
rpc GetConfigTime(google.protobuf.Empty) returns (ConfigTime) {}
// Datum/Uhrzeit/Zeitzone einstellen
rpc SetConfigTime(ConfigTime) returns (google.protobuf.Empty) {}
// Standardzeit bei manueller Bewässerung abrufen
rpc GetDefaultIrrigationTime(google.protobuf.Empty) returns (google.protobuf.Int32Value) {}
// Standardzeit bei manueller Bewässerung einstellen
rpc SetDefaultIrrigationTime(google.protobuf.Int32Value) returns (google.protobuf.Empty) {}
}
// Quelle einer Bewässerungsaufgabe
enum TaskSource {
MANUAL = 0;
SCHEDULE = 1;
}
// Unix-Zeitstempel
message Timestamp {
int64 seconds = 1;
}
message TaskRequest {
TaskSource source = 1;
int32 zone_id = 2;
int32 duration = 3;
bool queuing = 4;
bool cancelling = 5;
}
message TaskRequestResult {
bool started = 1;
bool stopped = 2;
}
message Task {
TaskSource source = 1;
int32 zone_id = 2;
int32 duration = 3;
Timestamp datetime_started = 4;
Timestamp datetime_finished = 5;
}
message TaskList {
repeated Task tasks = 1;
Timestamp now = 2;
}
message Job {
int32 id = 1;
Timestamp date = 2;
int32 duration = 3;
repeated int32 zones = 4;
bool enable = 5;
bool repeat = 6;
}
message JobID {
int32 id = 1;
}
message JobList {
repeated Job jobs = 1;
}
message ConfigTime {
Timestamp datetime = 1;
string timezone = 2;
}

View file

@ -2,3 +2,5 @@ tinydb~=4.6.1
cyra~=1.0.2 cyra~=1.0.2
schedule~=1.1.0 schedule~=1.1.0
smbus~=1.1.post2 smbus~=1.1.post2
protobuf~=3.19.3
grpcio~=1.43.0

4
requirements_dev.txt Normal file
View file

@ -0,0 +1,4 @@
grpcio-tools~=1.43.0
mypy-protobuf~=3.2.0
types-protobuf~=3.19.6
grpc-stubs~=1.24.7

View file

@ -22,10 +22,14 @@ setuptools.setup(
], ],
py_modules=['tsgrain_controller'], py_modules=['tsgrain_controller'],
install_requires=[ install_requires=[
'tinydb', 'tinydb~=4.6.1',
'cyra', 'cyra~=1.0.2',
'schedule', 'schedule~=1.1.0',
'smbus~=1.1.post2',
'protobuf~=3.19.3',
'grpcio~=1.43.0',
], ],
extras_require={'rpi': ['RPi.GPIO~=0.7.0']},
packages=setuptools.find_packages(exclude=['tests*']), packages=setuptools.find_packages(exclude=['tests*']),
entry_points={ entry_points={
'console_scripts': [ 'console_scripts': [

View file

@ -1,9 +1,9 @@
# coding=utf-8 # coding=utf-8
from typing import Optional, Callable, Dict from typing import Dict
from importlib_resources import files from importlib_resources import files
import os import os
from tsgrain_controller import io, util, config from tsgrain_controller import io, config, models
DIR_TESTFILES = str(files('tests.testfiles').joinpath('')) DIR_TESTFILES = str(files('tests.testfiles').joinpath(''))
FILE_CFG = os.path.join(DIR_TESTFILES, 'tsgrain.toml') FILE_CFG = os.path.join(DIR_TESTFILES, 'tsgrain.toml')
@ -11,26 +11,15 @@ FILE_CFG = os.path.join(DIR_TESTFILES, 'tsgrain.toml')
class TestingIo(io.Io): class TestingIo(io.Io):
def __init__(self, cb_manual: Optional[Callable[[int], None]], def __init__(self):
cb_mode: Optional[Callable[[], None]]): super().__init__()
self.cb_manual = cb_manual
self.cb_mode = cb_mode
self.outputs: Dict[str, bool] = dict() self.outputs: Dict[str, bool] = dict()
def trigger_cb_manual(self, zone_id: int):
if self.cb_manual is not None:
self.cb_manual(zone_id)
def trigger_cb_mode(self):
if self.cb_mode is not None:
self.cb_mode()
def write_output(self, key: str, val: bool): def write_output(self, key: str, val: bool):
self.outputs[key] = val self.outputs[key] = val
class TestingApp(util.AppInterface): class TestingApp(models.AppInterface):
def __init__(self, db_file=''): def __init__(self, db_file=''):
self.auto = False self.auto = False
@ -38,7 +27,7 @@ class TestingApp(util.AppInterface):
self.cfg.load_file(False) self.cfg.load_file(False)
self.cfg.db_path = db_file self.cfg.db_path = db_file
def is_auto_enabled(self) -> bool: def get_auto_mode(self) -> bool:
return self.auto return self.auto
def get_cfg(self) -> config.Config: def get_cfg(self) -> config.Config:

152
tests/test_application.py Normal file
View file

@ -0,0 +1,152 @@
# coding=utf-8
import tempfile
import pytest
import time
from datetime import datetime
from tsgrain_controller import application, models
from tests import fixtures
JOB1_DATA = {
'date': datetime(2022, 1, 10, 12, 30),
'duration': 60,
'enable': True,
'repeat': False,
'zones': [1, 3]
}
JOB2_DATA = {
'date': datetime(2022, 1, 11, 18, 0),
'duration': 300,
'enable': True,
'repeat': True,
'zones': [5]
}
@pytest.fixture
def app(mocker):
mocker.patch('tsgrain_controller.io.io_factory.new_io',
return_value=fixtures.TestingIo())
with tempfile.TemporaryDirectory() as td:
app = application.Application(True, td, fixtures.FILE_CFG)
assert not app.is_running()
app.start()
assert app.is_running()
yield app
app.stop()
assert not app.is_running()
def test_start_task(app):
# Manually start a task (like via button press)
res = app.start_task(
models.TaskRequest(source=models.Source.MANUAL,
zone_id=2,
duration=30,
queuing=False,
cancelling=True))
assert res.started
assert not res.stopped
# Queue processing time
time.sleep(0.1)
# Try to start the same task again -> nothing happens
res = app.start_task(
models.TaskRequest(source=models.Source.MANUAL,
zone_id=2,
duration=30,
queuing=False,
cancelling=False))
assert not res.started
assert not res.stopped
# Try to start the same task again -> task should cancel
res = app.start_task(
models.TaskRequest(source=models.Source.MANUAL,
zone_id=2,
duration=30,
queuing=False,
cancelling=True))
assert not res.started
assert res.stopped
def test_start_task_queue(app):
# Manually start a task (like via button press)
res = app.start_task(
models.TaskRequest(source=models.Source.MANUAL,
zone_id=2,
duration=30,
queuing=False,
cancelling=True))
assert res.started
assert not res.stopped
assert len(app.queue.tasks) == 1
assert app.queue.tasks[0].zone_id == 2
# Duplicate task should not be enqueued
res = app.start_task(
models.TaskRequest(source=models.Source.MANUAL,
zone_id=2,
duration=30,
queuing=True,
cancelling=False))
assert not res.started
assert not res.stopped
assert len(app.queue.tasks) == 1
# Enqueue a new task
res = app.start_task(
models.TaskRequest(source=models.Source.MANUAL,
zone_id=3,
duration=30,
queuing=True,
cancelling=False))
assert res.started
assert not res.stopped
tasks = app.get_tasks()
assert len(tasks) == 2
assert tasks[0].zone_id == 2
assert tasks[1].zone_id == 3
def test_crud_job(app):
# Insert jobs
job1 = models.Job.deserialize(JOB1_DATA)
job2 = models.Job.deserialize(JOB2_DATA)
assert app.create_job(job1) == job1.id == 1
assert app.create_job(job2) == job2.id == 2
# Get a job
got_job = app.get_job(1)
assert got_job.id == 1
assert got_job.serialize() == JOB1_DATA
# Get all jobs
got_jobs = app.get_jobs()
assert len(got_jobs) == 2
assert got_jobs[0].zones == [1, 3]
assert got_jobs[1].zones == [5]
# Update job
job2.zones = [4, 5]
app.update_job(job2)
assert app.get_job(2).zones == [4, 5]
# Delete job
app.delete_job(1)
assert len(app.get_jobs()) == 1
# Get job that does not exist
with pytest.raises(KeyError):
app.get_job(1)
# Delete job that does not exist
with pytest.raises(KeyError):
app.delete_job(1)

View file

@ -1,63 +1,109 @@
# coding=utf-8 # coding=utf-8
import tempfile import tempfile
import os import os
import pytest
from datetime import datetime
from tests import fixtures from tests import fixtures
from tsgrain_controller import database, util, queue, models from tsgrain_controller import database, util, task_queue, models
JOB1_DATA = {
'date': datetime(2022, 1, 10, 12, 30),
'duration': 60,
'enable': True,
'repeat': False,
'zones': [1, 3]
}
JOB2_DATA = {
'date': datetime(2022, 1, 11, 18, 0),
'duration': 300,
'enable': True,
'repeat': True,
'zones': [5]
}
def test_db_init(): @pytest.fixture
with tempfile.TemporaryDirectory() as td: def db():
dbfile = os.path.join(td, 'raindb.json')
db = database.RainDB(dbfile)
db.close()
assert os.path.isfile(dbfile)
def test_db_jobs():
with tempfile.TemporaryDirectory() as td: with tempfile.TemporaryDirectory() as td:
dbfile = os.path.join(td, 'raindb.json') dbfile = os.path.join(td, 'raindb.json')
db = database.RainDB(dbfile) db = database.RainDB(dbfile)
job1 = models.Job(util.datetime_new(2022, 1, 10, 12, 30), 60, [1, 3], job1 = models.Job.deserialize(JOB1_DATA)
True, False) job2 = models.Job.deserialize(JOB2_DATA)
job2 = models.Job(util.datetime_new(2022, 1, 10, 12, 30), 60, [1, 3],
True, True)
db.insert_job(job1) db.insert_job(job1)
db.insert_job(job2) db.insert_job(job2)
# Reopen database assert job1.id == 1
assert job2.id == 2
yield db
db.close() db.close()
db = database.RainDB(dbfile) assert os.path.isfile(dbfile)
jobs = db.get_jobs()
assert util.to_json(jobs) == '{"1": {"date": "2022-01-10T12:30:00", \
"duration": 60, "zones": [1, 3], "enable": [1, 3], "repeat": false}, "2": \
{"date": "2022-01-10T12:30:00", "duration": 60, "zones": [1, 3], \
"enable": [1, 3], "repeat": true}}'
job2.enable = False
db.update_job(2, job2)
db.delete_job(1)
jobs = db.get_jobs()
assert util.to_json(jobs) == '{"2": {"date": "2022-01-10T12:30:00", \
"duration": 60, "zones": [1, 3], "enable": [1, 3], "repeat": true}}'
def test_db_queue(): def test_get_jobs(db):
app = fixtures.TestingApp() jobs = db.get_jobs()
q = queue.TaskQueue(app) assert len(jobs) == 2
assert jobs[0].serialize() == JOB1_DATA
assert jobs[1].serialize() == JOB2_DATA
task1 = models.Task(models.Source.MANUAL, 1, 10)
task2 = models.Task(models.Source.SCHEDULE, 1, 5)
assert q.enqueue(task1) def test_get_job(db):
assert q.enqueue(task2) job1 = db.get_job(1)
assert job1.id == 1
assert job1.serialize() == JOB1_DATA
job2 = db.get_job(2)
assert job2.id == 2
assert job2.serialize() == JOB2_DATA
with pytest.raises(KeyError):
db.get_job(3)
def test_insert_job(db):
job3 = models.Job(util.datetime_new(2022, 1, 15, 15, 30), 250, [6], True,
True)
assert db.insert_job(job3) == 3
assert job3.id == 3
def test_update_job(db):
job1 = db.get_job(1)
job1.zones = [1, 2, 3]
db.update_job(job1)
got_job1 = db.get_job(1)
assert got_job1.id == job1.id == 1
assert got_job1.zones == [1, 2, 3]
def test_delete_job(db):
db.delete_job(1)
with pytest.raises(KeyError):
db.get_job(1)
assert len(db.get_jobs()) == 1
with pytest.raises(KeyError):
db.delete_job(3)
def test_queue():
with tempfile.TemporaryDirectory() as td: with tempfile.TemporaryDirectory() as td:
app = fixtures.TestingApp(td)
q = task_queue.TaskQueue(app)
task1 = models.Task(models.Source.MANUAL, 1, 10)
task2 = models.Task(models.Source.SCHEDULE, 1, 5)
assert q.enqueue(task1)
assert q.enqueue(task2)
dbfile = os.path.join(td, 'raindb.json') dbfile = os.path.join(td, 'raindb.json')
app = fixtures.TestingApp(dbfile) app = fixtures.TestingApp(dbfile)
@ -68,7 +114,7 @@ def test_db_queue():
db.close() db.close()
db = database.RainDB(dbfile) db = database.RainDB(dbfile)
read_queue = queue.TaskQueue(app) read_queue = task_queue.TaskQueue(app)
db.load_queue(read_queue) db.load_queue(read_queue)
assert util.to_json( assert util.to_json(
@ -77,17 +123,8 @@ def test_db_queue():
"remaining": 5}]' "remaining": 5}]'
def test_db_auto_mode(): def test_auto_mode(db):
with tempfile.TemporaryDirectory() as td: assert db.get_auto_mode() is False
dbfile = os.path.join(td, 'raindb.json')
db = database.RainDB(dbfile) db.set_auto_mode(True)
assert db.get_auto_mode() is False assert db.get_auto_mode() is True
db.set_auto_mode(True)
assert db.get_auto_mode() is True
# Reopen database
db.close()
db = database.RainDB(dbfile)
assert db.get_auto_mode() is True

171
tests/test_grpc_server.py Normal file
View file

@ -0,0 +1,171 @@
# coding=utf-8
import tempfile
import time
from datetime import datetime
import pytest
import grpc
from google.protobuf import empty_pb2
from tsgrain_controller import application, models, util
from tsgrain_controller.grpc_generated import tsgrain_pb2, tsgrain_pb2_grpc
from tests import fixtures
JOB1_DATA = {
'date': datetime(2022, 1, 10, 12, 30),
'duration': 60,
'enable': True,
'repeat': False,
'zones': [1, 3]
}
JOB2_DATA = {
'date': datetime(2022, 1, 11, 18, 0),
'duration': 300,
'enable': True,
'repeat': True,
'zones': [5]
}
@pytest.fixture
def app(mocker):
mocker.patch('tsgrain_controller.io.io_factory.new_io',
return_value=fixtures.TestingIo())
with tempfile.TemporaryDirectory() as td:
app = application.Application(True, td, fixtures.FILE_CFG)
assert not app.is_running()
app.start()
assert app.is_running()
yield app
app.stop()
assert not app.is_running()
@pytest.fixture
def grpc_client(app):
with grpc.insecure_channel('localhost:50051') as channel:
client = tsgrain_pb2_grpc.TSGRainStub(channel)
yield client
def test_start_task(grpc_client):
# Manually start a task (like via button press)
res = grpc_client.StartTask(
tsgrain_pb2.TaskRequest(source=tsgrain_pb2.TaskSource.MANUAL,
zone_id=2,
duration=30,
queuing=False,
cancelling=True))
assert res.started
assert not res.stopped
# Queue processing time
time.sleep(0.1)
# Try to start the same task again -> nothing happens
res = grpc_client.StartTask(
tsgrain_pb2.TaskRequest(source=tsgrain_pb2.TaskSource.MANUAL,
zone_id=2,
duration=30,
queuing=False,
cancelling=False))
assert not res.started
assert not res.stopped
# Try to start the same task again -> task should cancel
res = grpc_client.StartTask(
tsgrain_pb2.TaskRequest(source=tsgrain_pb2.TaskSource.MANUAL,
zone_id=2,
duration=30,
queuing=False,
cancelling=True))
assert not res.started
assert res.stopped
def test_start_task_queue(grpc_client):
# Manually start a task (like via button press)
res = grpc_client.StartTask(
tsgrain_pb2.TaskRequest(source=tsgrain_pb2.TaskSource.MANUAL,
zone_id=2,
duration=30,
queuing=False,
cancelling=True))
assert res.started
assert not res.stopped
task_list = grpc_client.GetTasks(empty_pb2.Empty())
assert task_list.now.seconds == pytest.approx(util.datetime_to_proto(
datetime.now()).seconds,
abs=1)
assert len(task_list.tasks) == 1
assert task_list.tasks[0].zone_id == 2
# Duplicate task should not be enqueued
res = grpc_client.StartTask(
tsgrain_pb2.TaskRequest(source=tsgrain_pb2.TaskSource.MANUAL,
zone_id=2,
duration=30,
queuing=True,
cancelling=False))
assert not res.started
assert not res.stopped
task_list = grpc_client.GetTasks(empty_pb2.Empty())
assert len(task_list.tasks) == 1
# Enqueue a new task
res = grpc_client.StartTask(
tsgrain_pb2.TaskRequest(source=tsgrain_pb2.TaskSource.MANUAL,
zone_id=3,
duration=30,
queuing=True,
cancelling=False))
assert res.started
assert not res.stopped
task_list = grpc_client.GetTasks(empty_pb2.Empty())
assert len(task_list.tasks) == 2
assert task_list.tasks[0].zone_id == 2
assert task_list.tasks[1].zone_id == 3
def test_crud_job(grpc_client):
# Insert jobs
job1 = models.Job.deserialize(JOB1_DATA)
job2 = models.Job.deserialize(JOB2_DATA)
assert grpc_client.CreateJob(job1.serialize_proto()).id == 1
assert grpc_client.CreateJob(job2.serialize_proto()).id == 2
# Get a job
got_job = grpc_client.GetJob(tsgrain_pb2.JobID(id=1))
assert got_job.id == 1
assert models.Job.deserialize_proto(got_job).serialize() == JOB1_DATA
# Get all jobs
job_list = grpc_client.GetJobs(empty_pb2.Empty())
assert len(job_list.jobs) == 2
assert job_list.jobs[0].zones == [1, 3]
assert job_list.jobs[1].zones == [5]
# Update job
job2.id = 2
job2.zones = [4, 5]
grpc_client.UpdateJob(job2.serialize_proto())
assert grpc_client.GetJob(tsgrain_pb2.JobID(id=2)).zones == [4, 5]
# Delete job
grpc_client.DeleteJob(tsgrain_pb2.JobID(id=1))
assert len(grpc_client.GetJobs(empty_pb2.Empty()).jobs) == 1
# Get job that does not exist
with pytest.raises(grpc.RpcError):
grpc_client.GetJob(tsgrain_pb2.JobID(id=1))
# Delete job that does not exist
with pytest.raises(grpc.RpcError):
grpc_client.DeleteJob(tsgrain_pb2.JobID(id=1))

View file

@ -5,10 +5,10 @@ import defer
from unittest import mock from unittest import mock
from tests import fixtures from tests import fixtures
from tsgrain_controller import output, queue, models from tsgrain_controller import output, task_queue, models
class _TaskHolder(queue.TaskHolder): class _TaskHolder(task_queue.TaskHolder):
def __init__(self, task: Optional[models.Task]): def __init__(self, task: Optional[models.Task]):
self.task = task self.task = task
@ -19,14 +19,14 @@ class _TaskHolder(queue.TaskHolder):
@defer.with_defer @defer.with_defer
def test_zone_outputs(): def test_zone_outputs():
io = fixtures.TestingIo(None, None) io = fixtures.TestingIo()
task_holder = _TaskHolder(None) task_holder = _TaskHolder(None)
app = fixtures.TestingApp() app = fixtures.TestingApp()
op = output.Outputs(io, task_holder, app) op = output.Outputs(io, task_holder, app)
op.start() op.start()
defer.defer(op.stop) defer.defer(op.stop)
time.sleep(0.01) time.sleep(0.02)
# Expect all devices initialized to OFF # Expect all devices initialized to OFF
assert io.outputs == { assert io.outputs == {
@ -41,7 +41,7 @@ def test_zone_outputs():
} }
task_holder.task = models.Task(models.Source.MANUAL, 1, 100) task_holder.task = models.Task(models.Source.MANUAL, 1, 100)
time.sleep(0.01) time.sleep(0.02)
assert io.outputs == { assert io.outputs == {
'VALVE_1': True, 'VALVE_1': True,
'VALVE_2': False, 'VALVE_2': False,
@ -55,7 +55,7 @@ def test_zone_outputs():
} }
task_holder.task = None task_holder.task = None
time.sleep(0.01) time.sleep(0.02)
assert io.outputs == { assert io.outputs == {
'VALVE_1': False, 'VALVE_1': False,
'VALVE_2': False, 'VALVE_2': False,

View file

@ -3,7 +3,7 @@ import time
from datetime import datetime from datetime import datetime
from tests import fixtures from tests import fixtures
from tsgrain_controller import queue, util, models from tsgrain_controller import task_queue, util, models
def test_task(): def test_task():
@ -50,7 +50,7 @@ def test_task_started(mocker):
def test_add_tasks(): def test_add_tasks():
app = fixtures.TestingApp() app = fixtures.TestingApp()
q = queue.TaskQueue(app) q = task_queue.TaskQueue(app)
task1 = models.Task(models.Source.MANUAL, 1, 10) task1 = models.Task(models.Source.MANUAL, 1, 10)
task1b = models.Task(models.Source.SCHEDULE, 1, 5) task1b = models.Task(models.Source.SCHEDULE, 1, 5)
@ -63,7 +63,7 @@ def test_add_tasks():
def test_queue_runner(): def test_queue_runner():
app = fixtures.TestingApp() app = fixtures.TestingApp()
q = queue.TaskQueue(app) q = task_queue.TaskQueue(app)
task1 = models.Task(models.Source.MANUAL, 1, 1) task1 = models.Task(models.Source.MANUAL, 1, 1)
task2 = models.Task(models.Source.MANUAL, 2, 5) task2 = models.Task(models.Source.MANUAL, 2, 5)

View file

@ -1 +1,38 @@
n_zones = 3 # Anzahl Bewässerungszonen n_zones = 3 # Anzahl Bewässerungszonen
db_path = "" # Pfad der Datenbankdatei
manual_time = 300 # Manuelle Bewaesserungszeit in Sekunden
grpc_port = "[::]:50051" # Port für GRPC-Kommunikation
log_debug = false # Debug-Ausgaben loggen
[io]
i2c_bus_id = 0 # ID des I2C-Bus
gpio_interrupt = 17 # GPIO-Pin, mit dem der Interrupt-Pin des MCP23017 verbunden ist
gpio_delay = 0.05 # Entprellzeit in Sekunden
[output_devices] # Ausgaenge
VALVE_1 = "0x27/B0/!"
VALVE_2 = "0x27/B1/!"
VALVE_3 = "0x27/B2/!"
VALVE_4 = "0x27/B3/!"
VALVE_5 = "0x27/B4/!"
VALVE_6 = "0x27/B5/!"
VALVE_7 = "0x27/B6/!"
LED_Z_1 = "0x27/A0"
LED_Z_2 = "0x27/A1"
LED_Z_3 = "0x27/A2"
LED_Z_4 = "0x27/A3"
LED_Z_5 = "0x27/A4"
LED_Z_6 = "0x27/A5"
LED_Z_7 = "0x27/A6"
LED_M_AUTO = "0x23/B0"
LED_M_MAN = "0x23/B1"
[input_devices] # Eingaenge
BT_Z_1 = "0x23/A0/!"
BT_Z_2 = "0x23/A1/!"
BT_Z_3 = "0x23/A2/!"
BT_Z_4 = "0x23/A3/!"
BT_Z_5 = "0x23/A4/!"
BT_Z_6 = "0x23/A5/!"
BT_Z_7 = "0x23/A6/!"
BT_MODE = "0x23/A7/!"

20
tox.ini
View file

@ -11,11 +11,11 @@ commands =
pytest tests pytest tests
[testenv:lint] [testenv:lint]
description = Lint with flake8 description = Lint with pylint
skip_install = true deps =
deps = flake8 pylint
commands = commands =
flake8 tsgrain_controller --count --max-complexity=10 --max-line-length=100 --show-source --statistics pylint tsgrain_controller
[testenv:coverage] [testenv:coverage]
description = Output HTML coverage report. description = Output HTML coverage report.
@ -33,3 +33,15 @@ deps = bump2version
skip_install = true skip_install = true
commands = commands =
bump2version {posargs} bump2version {posargs}
[testenv:protoc]
description = Generate protobuf code
deps =
-r{toxinidir}/requirements_dev.txt
skip_install = true
commands =
python -m grpc_tools.protoc {toxinidir}/proto/tsgrain.proto \
--python_out {toxinidir} \
--grpc_python_out {toxinidir} \
--mypy_out {toxinidir} \
--proto_path tsgrain_controller/grpc_generated=./proto/

View file

@ -8,13 +8,13 @@ def run():
console_flag = False console_flag = False
if len(sys.argv) > 1: if len(sys.argv) > 1:
x = sys.argv[1] arg1 = sys.argv[1]
if x.startswith('c'): if arg1.startswith('c'):
console_flag = True console_flag = True
app = application.Application(console_flag) app = application.Application(console_flag)
def _signal_handler(sig, frame): def _signal_handler(sig, frame): # pylint: disable=unused-argument
app.stop() app.stop()
print('Exited.') print('Exited.')

View file

@ -1,37 +1,57 @@
# coding=utf-8 # coding=utf-8
import logging import logging
import os.path
from typing import List, Optional
from tsgrain_controller import config, controller, database, io, jobschedule, output, \ from tsgrain_controller import config, database, jobschedule, output, task_queue, \
queue, util models, grpc_server
from tsgrain_controller.io import io_factory
class Application(util.AppInterface): class Application(models.AppInterface):
def __init__(self,
console_flag: bool,
workdir: str = '',
cfg_path: Optional[str] = None):
if cfg_path is None:
cfg_path = os.path.join(workdir, 'tsgrain.toml')
def __init__(self, console_flag: bool):
self.logger = logging.getLogger() self.logger = logging.getLogger()
self.logger.setLevel(logging.DEBUG) self.logger.setLevel(logging.DEBUG)
self.cfg = config.Config('tsgrain.toml') self.cfg = config.Config(cfg_path)
self.cfg.load_file() self.cfg.load_file()
self.db = database.RainDB(self.cfg.db_path) if self.cfg.db_path:
db_path = self.cfg.db_path
else:
db_path = os.path.join(workdir, 'raindb.json')
self.queue = queue.TaskQueue(self) self.db = database.RainDB(db_path)
self.queue = task_queue.TaskQueue(self)
self.db.load_queue(self.queue) self.db.load_queue(self.queue)
b_ctrl = controller.ButtonController(self.queue, self.cfg) self.io = io_factory.new_io(self, console_flag)
self.io.set_callbacks(self._cb_manual, self._cb_modekey)
self.io = io.new_io(self, console_flag)
self.io.set_callbacks(b_ctrl.cb_manual, self.cb_modekey)
self.outputs = output.Outputs(self.io, self.queue, self) self.outputs = output.Outputs(self.io, self.queue, self)
self.scheduler = jobschedule.Scheduler(self.db, self.queue) self.scheduler = jobschedule.Scheduler(self)
self.auto_en = self.db.get_auto_mode() self.grpc_tsgrain_servicer = grpc_server.TSGRainServicer(self)
self.grpc_server = grpc_server.new_server(self.grpc_tsgrain_servicer,
self.cfg.grpc_port)
def is_auto_enabled(self) -> bool: self._auto_en = self.db.get_auto_mode()
return self.auto_en self._running = False
def get_auto_mode(self) -> bool:
return self._auto_en
def set_auto_mode(self, state: bool):
self._auto_en = state
def get_cfg(self) -> config.Config: def get_cfg(self) -> config.Config:
return self.cfg return self.cfg
@ -39,21 +59,83 @@ class Application(util.AppInterface):
def get_logger(self) -> logging.Logger: def get_logger(self) -> logging.Logger:
return self.logger return self.logger
def cb_modekey(self): def _cb_manual(self, zone_id: int):
self.auto_en = not self.auto_en self.start_task(
models.TaskRequest(source=models.Source.MANUAL,
zone_id=zone_id,
duration=self.cfg.manual_time,
queuing=False,
cancelling=True))
if self.auto_en: def _cb_modekey(self):
self._auto_en = not self._auto_en
if self._auto_en:
logging.info('Auto mode ON') logging.info('Auto mode ON')
else: else:
logging.info('Auto mode OFF') logging.info('Auto mode OFF')
def start_task(self,
request: models.TaskRequest) -> models.TaskRequestResult:
if request.queuing:
if request.cancelling:
# If a task from this zone is in queue, cancel it
for task in self.queue.tasks:
if task.zone_id == request.zone_id and task.source == request.source:
self.queue.cancel_task(task)
return models.TaskRequestResult(False, True)
elif request.cancelling:
current_task = self.queue.get_current_task()
# Cancel manually started tasks
if current_task is not None \
and current_task.zone_id == request.zone_id \
and current_task.source == request.source:
self.queue.cancel_current_task()
return models.TaskRequestResult(False, True)
duration = request.duration
if duration < 1:
duration = self.cfg.manual_time
task = models.Task(models.Source.MANUAL, request.zone_id, duration)
started = self.queue.enqueue(task, not request.queuing)
return models.TaskRequestResult(started, False)
def get_tasks(self) -> List[models.Task]:
return self.queue.tasks
def create_job(self, job: models.Job) -> int:
return self.db.insert_job(job)
def get_job(self, job_id: int) -> models.Job:
return self.db.get_job(job_id)
def get_jobs(self) -> List[models.Job]:
return self.db.get_jobs()
def update_job(self, job: models.Job):
self.db.update_job(job)
def delete_job(self, job_id):
self.db.delete_job(job_id)
def notify_queue_update(self):
self.grpc_tsgrain_servicer.queue_update.set()
def is_running(self) -> bool:
return self._running
def start(self): def start(self):
self._running = True
self.io.start() self.io.start()
self.outputs.start() self.outputs.start()
self.queue.start() self.queue.start()
self.scheduler.start() self.scheduler.start()
self.grpc_server.start()
def stop(self): def stop(self):
self._running = False
self.grpc_server.stop(None)
self.scheduler.stop() self.scheduler.stop()
self.queue.stop() self.queue.stop()
self.outputs.stop() self.outputs.stop()
@ -61,4 +143,4 @@ class Application(util.AppInterface):
# Store application state # Store application state
self.db.store_queue(self.queue) self.db.store_queue(self.queue)
self.db.set_auto_mode(self.auto_en) self.db.set_auto_mode(self._auto_en)

View file

@ -9,11 +9,14 @@ class Config(cyra.Config):
n_zones = builder.define('n_zones', 7) n_zones = builder.define('n_zones', 7)
builder.comment('Pfad der Datenbankdatei') builder.comment('Pfad der Datenbankdatei')
db_path = builder.define('db_path', 'raindb.json') db_path = builder.define('db_path', '')
builder.comment('Manuelle Bewaesserungszeit in Sekunden') builder.comment('Manuelle Bewaesserungszeit in Sekunden')
manual_time = builder.define('manual_time', 300) manual_time = builder.define('manual_time', 300)
builder.comment('Port für GRPC-Kommunikation')
grpc_port = builder.define('grpc_port', '[::]:50051')
builder.comment('Debug-Ausgaben loggen') builder.comment('Debug-Ausgaben loggen')
log_debug = builder.define('log_debug', False) log_debug = builder.define('log_debug', False)

View file

@ -1,36 +0,0 @@
# coding=utf-8
from typing import Callable, Optional
from tsgrain_controller import config, models, queue
class ButtonController:
def __init__(self, task_queue: queue.TaskQueue, cfg: config.Config):
self.task_queue = task_queue
self.cfg = cfg
self.cb_error: Optional[Callable[[], None]] = None
def cb_manual(self, zone_id: int):
current_task = self.task_queue.get_current_task()
# Cancel manually started tasks
if current_task is not None \
and current_task.zone_id == zone_id and current_task.source == models.Source.MANUAL:
self.task_queue.cancel_current_task()
else:
task = models.Task(models.Source.MANUAL, zone_id,
self.cfg.manual_time)
self.task_queue.enqueue(task, True)
class QueuingButtonController(ButtonController):
def cb_manual(self, zone_id: int):
# If a task from this zone is in queue, cancel it
for task in self.task_queue.tasks:
if task.zone_id == zone_id and task.source == models.Source.MANUAL:
self.task_queue.cancel_task(task)
return
task = models.Task(models.Source.MANUAL, zone_id, self.cfg.manual_time)
self.task_queue.enqueue(task)

View file

@ -1,7 +1,7 @@
# coding=utf-8 # coding=utf-8
from typing import List
import tinydb import tinydb
from typing import Dict, Optional from tsgrain_controller import task_queue, util, models
from tsgrain_controller import queue, util, models
class RainDB: class RainDB:
@ -17,74 +17,78 @@ class RainDB:
"""Die Datenbank schließen""" """Die Datenbank schließen"""
self._db.close() self._db.close()
def get_jobs(self) -> Dict[int, models.Job]: def get_jobs(self) -> List[models.Job]:
""" """
Gibt alle gespeicherten Bewässerungsjobs zurück Gibt alle gespeicherten Bewässerungsjobs zurück
:return: Bewässerungsjobs: dict(id -> models.Job) :return: Bewässerungsjobs: dict(id -> models.Job)
""" """
res = dict() return [
for job_data in self._jobs.all(): models.Job.deserialize(job_data, job_data.doc_id)
res[job_data.doc_id] = models.Job.deserialize(job_data) for job_data in self._jobs.all()
return res ]
def get_job(self, job_id: int) -> Optional[models.Job]: def get_job(self, job_id: int) -> models.Job:
""" """
Gibt den Bewässerungsjob mit der gegebenen ID zurück Gibt den Bewässerungsjob mit der gegebenen ID zurück
:param job_id: ID des Bewässerungsjobs :param job_id: ID des Bewässerungsjobs
:return: Bewässerungsjob :return: Bewässerungsjob
:raise KeyError: wenn Job nicht gefunden
""" """
job = self._jobs.get(None, job_id) job = self._jobs.get(None, job_id)
if job is None: if job is None:
return None raise KeyError
return models.Job.deserialize(job) return models.Job.deserialize(job, job_id)
def insert_job(self, job: models.Job): def insert_job(self, job: models.Job) -> int:
""" """
Fügt der Datenbank einen neuen Bewässerungsjob hinzu Fügt der Datenbank einen neuen Bewässerungsjob hinzu
:param job: Bewässerungsjob :param job: Bewässerungsjob
:return: ID des neuen Bewässerungsjobs
""" """
self._jobs.insert(util.serializer(job)) job_id = self._jobs.insert(util.serializer(job))
job.id = job_id
return job_id
def update_job(self, job_id: int, job: models.Job): def update_job(self, job: models.Job):
""" """
Aktualisiert einen Bewässerungsjob Aktualisiert einen Bewässerungsjob
:param job_id: ID des Bewässerungsjobs
:param job: Bewässerungsjob :param job: Bewässerungsjob
""" """
self._jobs.update(util.serializer(job), None, [job_id]) self._jobs.update(util.serializer(job), None, [job.id])
def delete_job(self, job_id: int): def delete_job(self, job_id: int):
""" """
Lösche den Bewässerungsjob mit der gegebenen ID Lösche den Bewässerungsjob mit der gegebenen ID
:param job_id: ID des Bewässerungsjobs :param job_id: ID des Bewässerungsjobs
:raise KeyError: wenn Job nicht gefunden
""" """
self._jobs.remove(None, [job_id]) self._jobs.remove(None, [job_id])
def store_queue(self, task_queue: queue.TaskQueue): def store_queue(self, queue: task_queue.TaskQueue):
""" """
Speichere die aktuelle Warteschlange in der Datenbank Speichere die aktuelle Warteschlange in der Datenbank
:param task_queue: Warteschlange :param queue: Warteschlange
""" """
self.empty_queue() self.empty_queue()
for task in task_queue.serialize(): for task in queue.serialize():
self._tasks.insert(util.serializer(task)) self._tasks.insert(util.serializer(task))
def load_queue(self, task_queue: queue.TaskQueue): def load_queue(self, queue: task_queue.TaskQueue):
""" """
Lade die gespeicherten Tasks aus der Datenbank in die Warteschlange Lade die gespeicherten Tasks aus der Datenbank in die Warteschlange
:param task_queue: Warteschlange :param queue: Warteschlange
""" """
for task_data in self._tasks.all(): for task_data in self._tasks.all():
task = models.Task.deserialize(task_data) task = models.Task.deserialize(task_data)
task_queue.tasks.append(task) queue.tasks.append(task)
self.empty_queue() self.empty_queue()
def empty_queue(self): def empty_queue(self):

View file

@ -0,0 +1,700 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: tsgrain_controller/grpc_generated/tsgrain.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='tsgrain_controller/grpc_generated/tsgrain.proto',
package='',
syntax='proto3',
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n/tsgrain_controller/grpc_generated/tsgrain.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1egoogle/protobuf/wrappers.proto\"\x1c\n\tTimestamp\x12\x0f\n\x07seconds\x18\x01 \x01(\x03\"r\n\x0bTaskRequest\x12\x1b\n\x06source\x18\x01 \x01(\x0e\x32\x0b.TaskSource\x12\x0f\n\x07zone_id\x18\x02 \x01(\x05\x12\x10\n\x08\x64uration\x18\x03 \x01(\x05\x12\x0f\n\x07queuing\x18\x04 \x01(\x08\x12\x12\n\ncancelling\x18\x05 \x01(\x08\"5\n\x11TaskRequestResult\x12\x0f\n\x07started\x18\x01 \x01(\x08\x12\x0f\n\x07stopped\x18\x02 \x01(\x08\"\x93\x01\n\x04Task\x12\x1b\n\x06source\x18\x01 \x01(\x0e\x32\x0b.TaskSource\x12\x0f\n\x07zone_id\x18\x02 \x01(\x05\x12\x10\n\x08\x64uration\x18\x03 \x01(\x05\x12$\n\x10\x64\x61tetime_started\x18\x04 \x01(\x0b\x32\n.Timestamp\x12%\n\x11\x64\x61tetime_finished\x18\x05 \x01(\x0b\x32\n.Timestamp\"9\n\x08TaskList\x12\x14\n\x05tasks\x18\x01 \x03(\x0b\x32\x05.Task\x12\x17\n\x03now\x18\x02 \x01(\x0b\x32\n.Timestamp\"l\n\x03Job\x12\n\n\x02id\x18\x01 \x01(\x05\x12\x18\n\x04\x64\x61te\x18\x02 \x01(\x0b\x32\n.Timestamp\x12\x10\n\x08\x64uration\x18\x03 \x01(\x05\x12\r\n\x05zones\x18\x04 \x03(\x05\x12\x0e\n\x06\x65nable\x18\x05 \x01(\x08\x12\x0e\n\x06repeat\x18\x06 \x01(\x08\"\x13\n\x05JobID\x12\n\n\x02id\x18\x01 \x01(\x05\"\x1d\n\x07JobList\x12\x12\n\x04jobs\x18\x01 \x03(\x0b\x32\x04.Job\"<\n\nConfigTime\x12\x1c\n\x08\x64\x61tetime\x18\x01 \x01(\x0b\x32\n.Timestamp\x12\x10\n\x08timezone\x18\x02 
\x01(\t*&\n\nTaskSource\x12\n\n\x06MANUAL\x10\x00\x12\x0c\n\x08SCHEDULE\x10\x01\x32\x83\x06\n\x07TSGRain\x12/\n\tStartTask\x12\x0c.TaskRequest\x1a\x12.TaskRequestResult\"\x00\x12/\n\x08GetTasks\x12\x16.google.protobuf.Empty\x1a\t.TaskList\"\x00\x12\x34\n\x0bStreamTasks\x12\x16.google.protobuf.Empty\x1a\t.TaskList\"\x00\x30\x01\x12\x1b\n\tCreateJob\x12\x04.Job\x1a\x06.JobID\"\x00\x12\x18\n\x06GetJob\x12\x06.JobID\x1a\x04.Job\"\x00\x12-\n\x07GetJobs\x12\x16.google.protobuf.Empty\x1a\x08.JobList\"\x00\x12+\n\tUpdateJob\x12\x04.Job\x1a\x16.google.protobuf.Empty\"\x00\x12-\n\tDeleteJob\x12\x06.JobID\x1a\x16.google.protobuf.Empty\"\x00\x12\x43\n\x0bGetAutoMode\x12\x16.google.protobuf.Empty\x1a\x1a.google.protobuf.BoolValue\"\x00\x12\x43\n\x0bSetAutoMode\x12\x1a.google.protobuf.BoolValue\x1a\x16.google.protobuf.Empty\"\x00\x12\x36\n\rGetConfigTime\x12\x16.google.protobuf.Empty\x1a\x0b.ConfigTime\"\x00\x12\x36\n\rSetConfigTime\x12\x0b.ConfigTime\x1a\x16.google.protobuf.Empty\"\x00\x12Q\n\x18GetDefaultIrrigationTime\x12\x16.google.protobuf.Empty\x1a\x1b.google.protobuf.Int32Value\"\x00\x12Q\n\x18SetDefaultIrrigationTime\x12\x1b.google.protobuf.Int32Value\x1a\x16.google.protobuf.Empty\"\x00\x62\x06proto3'
,
dependencies=[google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR,])
_TASKSOURCE = _descriptor.EnumDescriptor(
name='TaskSource',
full_name='TaskSource',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='MANUAL', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='SCHEDULE', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=746,
serialized_end=784,
)
_sym_db.RegisterEnumDescriptor(_TASKSOURCE)
TaskSource = enum_type_wrapper.EnumTypeWrapper(_TASKSOURCE)
MANUAL = 0
SCHEDULE = 1
_TIMESTAMP = _descriptor.Descriptor(
name='Timestamp',
full_name='Timestamp',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='seconds', full_name='Timestamp.seconds', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=112,
serialized_end=140,
)
_TASKREQUEST = _descriptor.Descriptor(
name='TaskRequest',
full_name='TaskRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='source', full_name='TaskRequest.source', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='zone_id', full_name='TaskRequest.zone_id', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='duration', full_name='TaskRequest.duration', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='queuing', full_name='TaskRequest.queuing', index=3,
number=4, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='cancelling', full_name='TaskRequest.cancelling', index=4,
number=5, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=142,
serialized_end=256,
)
_TASKREQUESTRESULT = _descriptor.Descriptor(
name='TaskRequestResult',
full_name='TaskRequestResult',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='started', full_name='TaskRequestResult.started', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='stopped', full_name='TaskRequestResult.stopped', index=1,
number=2, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=258,
serialized_end=311,
)
_TASK = _descriptor.Descriptor(
name='Task',
full_name='Task',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='source', full_name='Task.source', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='zone_id', full_name='Task.zone_id', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='duration', full_name='Task.duration', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='datetime_started', full_name='Task.datetime_started', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='datetime_finished', full_name='Task.datetime_finished', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=314,
serialized_end=461,
)
_TASKLIST = _descriptor.Descriptor(
name='TaskList',
full_name='TaskList',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='tasks', full_name='TaskList.tasks', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='now', full_name='TaskList.now', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=463,
serialized_end=520,
)
_JOB = _descriptor.Descriptor(
name='Job',
full_name='Job',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='Job.id', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='date', full_name='Job.date', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='duration', full_name='Job.duration', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='zones', full_name='Job.zones', index=3,
number=4, type=5, cpp_type=1, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='enable', full_name='Job.enable', index=4,
number=5, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeat', full_name='Job.repeat', index=5,
number=6, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=522,
serialized_end=630,
)
_JOBID = _descriptor.Descriptor(
name='JobID',
full_name='JobID',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='JobID.id', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=632,
serialized_end=651,
)
_JOBLIST = _descriptor.Descriptor(
name='JobList',
full_name='JobList',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='jobs', full_name='JobList.jobs', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=653,
serialized_end=682,
)
_CONFIGTIME = _descriptor.Descriptor(
name='ConfigTime',
full_name='ConfigTime',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='datetime', full_name='ConfigTime.datetime', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='timezone', full_name='ConfigTime.timezone', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=684,
serialized_end=744,
)
_TASKREQUEST.fields_by_name['source'].enum_type = _TASKSOURCE
_TASK.fields_by_name['source'].enum_type = _TASKSOURCE
_TASK.fields_by_name['datetime_started'].message_type = _TIMESTAMP
_TASK.fields_by_name['datetime_finished'].message_type = _TIMESTAMP
_TASKLIST.fields_by_name['tasks'].message_type = _TASK
_TASKLIST.fields_by_name['now'].message_type = _TIMESTAMP
_JOB.fields_by_name['date'].message_type = _TIMESTAMP
_JOBLIST.fields_by_name['jobs'].message_type = _JOB
_CONFIGTIME.fields_by_name['datetime'].message_type = _TIMESTAMP
DESCRIPTOR.message_types_by_name['Timestamp'] = _TIMESTAMP
DESCRIPTOR.message_types_by_name['TaskRequest'] = _TASKREQUEST
DESCRIPTOR.message_types_by_name['TaskRequestResult'] = _TASKREQUESTRESULT
DESCRIPTOR.message_types_by_name['Task'] = _TASK
DESCRIPTOR.message_types_by_name['TaskList'] = _TASKLIST
DESCRIPTOR.message_types_by_name['Job'] = _JOB
DESCRIPTOR.message_types_by_name['JobID'] = _JOBID
DESCRIPTOR.message_types_by_name['JobList'] = _JOBLIST
DESCRIPTOR.message_types_by_name['ConfigTime'] = _CONFIGTIME
DESCRIPTOR.enum_types_by_name['TaskSource'] = _TASKSOURCE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Timestamp = _reflection.GeneratedProtocolMessageType('Timestamp', (_message.Message,), {
'DESCRIPTOR' : _TIMESTAMP,
'__module__' : 'tsgrain_controller.grpc_generated.tsgrain_pb2'
# @@protoc_insertion_point(class_scope:Timestamp)
})
_sym_db.RegisterMessage(Timestamp)
TaskRequest = _reflection.GeneratedProtocolMessageType('TaskRequest', (_message.Message,), {
'DESCRIPTOR' : _TASKREQUEST,
'__module__' : 'tsgrain_controller.grpc_generated.tsgrain_pb2'
# @@protoc_insertion_point(class_scope:TaskRequest)
})
_sym_db.RegisterMessage(TaskRequest)
TaskRequestResult = _reflection.GeneratedProtocolMessageType('TaskRequestResult', (_message.Message,), {
'DESCRIPTOR' : _TASKREQUESTRESULT,
'__module__' : 'tsgrain_controller.grpc_generated.tsgrain_pb2'
# @@protoc_insertion_point(class_scope:TaskRequestResult)
})
_sym_db.RegisterMessage(TaskRequestResult)
Task = _reflection.GeneratedProtocolMessageType('Task', (_message.Message,), {
'DESCRIPTOR' : _TASK,
'__module__' : 'tsgrain_controller.grpc_generated.tsgrain_pb2'
# @@protoc_insertion_point(class_scope:Task)
})
_sym_db.RegisterMessage(Task)
TaskList = _reflection.GeneratedProtocolMessageType('TaskList', (_message.Message,), {
'DESCRIPTOR' : _TASKLIST,
'__module__' : 'tsgrain_controller.grpc_generated.tsgrain_pb2'
# @@protoc_insertion_point(class_scope:TaskList)
})
_sym_db.RegisterMessage(TaskList)
Job = _reflection.GeneratedProtocolMessageType('Job', (_message.Message,), {
'DESCRIPTOR' : _JOB,
'__module__' : 'tsgrain_controller.grpc_generated.tsgrain_pb2'
# @@protoc_insertion_point(class_scope:Job)
})
_sym_db.RegisterMessage(Job)
JobID = _reflection.GeneratedProtocolMessageType('JobID', (_message.Message,), {
'DESCRIPTOR' : _JOBID,
'__module__' : 'tsgrain_controller.grpc_generated.tsgrain_pb2'
# @@protoc_insertion_point(class_scope:JobID)
})
_sym_db.RegisterMessage(JobID)
JobList = _reflection.GeneratedProtocolMessageType('JobList', (_message.Message,), {
'DESCRIPTOR' : _JOBLIST,
'__module__' : 'tsgrain_controller.grpc_generated.tsgrain_pb2'
# @@protoc_insertion_point(class_scope:JobList)
})
_sym_db.RegisterMessage(JobList)
ConfigTime = _reflection.GeneratedProtocolMessageType('ConfigTime', (_message.Message,), {
'DESCRIPTOR' : _CONFIGTIME,
'__module__' : 'tsgrain_controller.grpc_generated.tsgrain_pb2'
# @@protoc_insertion_point(class_scope:ConfigTime)
})
_sym_db.RegisterMessage(ConfigTime)
_TSGRAIN = _descriptor.ServiceDescriptor(
name='TSGRain',
full_name='TSGRain',
file=DESCRIPTOR,
index=0,
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_start=787,
serialized_end=1558,
methods=[
_descriptor.MethodDescriptor(
name='StartTask',
full_name='TSGRain.StartTask',
index=0,
containing_service=None,
input_type=_TASKREQUEST,
output_type=_TASKREQUESTRESULT,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='GetTasks',
full_name='TSGRain.GetTasks',
index=1,
containing_service=None,
input_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
output_type=_TASKLIST,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='StreamTasks',
full_name='TSGRain.StreamTasks',
index=2,
containing_service=None,
input_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
output_type=_TASKLIST,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='CreateJob',
full_name='TSGRain.CreateJob',
index=3,
containing_service=None,
input_type=_JOB,
output_type=_JOBID,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='GetJob',
full_name='TSGRain.GetJob',
index=4,
containing_service=None,
input_type=_JOBID,
output_type=_JOB,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='GetJobs',
full_name='TSGRain.GetJobs',
index=5,
containing_service=None,
input_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
output_type=_JOBLIST,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='UpdateJob',
full_name='TSGRain.UpdateJob',
index=6,
containing_service=None,
input_type=_JOB,
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='DeleteJob',
full_name='TSGRain.DeleteJob',
index=7,
containing_service=None,
input_type=_JOBID,
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='GetAutoMode',
full_name='TSGRain.GetAutoMode',
index=8,
containing_service=None,
input_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
output_type=google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='SetAutoMode',
full_name='TSGRain.SetAutoMode',
index=9,
containing_service=None,
input_type=google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE,
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='GetConfigTime',
full_name='TSGRain.GetConfigTime',
index=10,
containing_service=None,
input_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
output_type=_CONFIGTIME,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='SetConfigTime',
full_name='TSGRain.SetConfigTime',
index=11,
containing_service=None,
input_type=_CONFIGTIME,
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='GetDefaultIrrigationTime',
full_name='TSGRain.GetDefaultIrrigationTime',
index=12,
containing_service=None,
input_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
output_type=google_dot_protobuf_dot_wrappers__pb2._INT32VALUE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='SetDefaultIrrigationTime',
full_name='TSGRain.SetDefaultIrrigationTime',
index=13,
containing_service=None,
input_type=google_dot_protobuf_dot_wrappers__pb2._INT32VALUE,
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
])
_sym_db.RegisterServiceDescriptor(_TSGRAIN)
DESCRIPTOR.services_by_name['TSGRain'] = _TSGRAIN
# @@protoc_insertion_point(module_scope)

View file

@ -0,0 +1,189 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.internal.enum_type_wrapper
import google.protobuf.message
import typing
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
class _TaskSource:
    # Internal value holder for the TaskSource enum (standard mypy-protobuf pattern).
    ValueType = typing.NewType('ValueType', builtins.int)
    V: typing_extensions.TypeAlias = ValueType

class _TaskSourceEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_TaskSource.ValueType], builtins.type):
    DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
    MANUAL: _TaskSource.ValueType  # 0
    SCHEDULE: _TaskSource.ValueType  # 1

class TaskSource(_TaskSource, metaclass=_TaskSourceEnumTypeWrapper):
    """Source of an irrigation task."""
    pass

MANUAL: TaskSource.ValueType  # 0
SCHEDULE: TaskSource.ValueType  # 1
global___TaskSource = TaskSource
class Timestamp(google.protobuf.message.Message):
    """Unix timestamp (a single `seconds` field)."""
    DESCRIPTOR: google.protobuf.descriptor.Descriptor
    SECONDS_FIELD_NUMBER: builtins.int
    seconds: builtins.int
    def __init__(self,
        *,
        seconds: builtins.int = ...,
        ) -> None: ...
    def ClearField(self, field_name: typing_extensions.Literal["seconds",b"seconds"]) -> None: ...
global___Timestamp = Timestamp
class TaskRequest(google.protobuf.message.Message):
    """Request message for the StartTask RPC (zone, duration, queuing flags)."""
    DESCRIPTOR: google.protobuf.descriptor.Descriptor
    SOURCE_FIELD_NUMBER: builtins.int
    ZONE_ID_FIELD_NUMBER: builtins.int
    DURATION_FIELD_NUMBER: builtins.int
    QUEUING_FIELD_NUMBER: builtins.int
    CANCELLING_FIELD_NUMBER: builtins.int
    source: global___TaskSource.ValueType
    zone_id: builtins.int
    duration: builtins.int
    queuing: builtins.bool
    cancelling: builtins.bool
    def __init__(self,
        *,
        source: global___TaskSource.ValueType = ...,
        zone_id: builtins.int = ...,
        duration: builtins.int = ...,
        queuing: builtins.bool = ...,
        cancelling: builtins.bool = ...,
        ) -> None: ...
    def ClearField(self, field_name: typing_extensions.Literal["cancelling",b"cancelling","duration",b"duration","queuing",b"queuing","source",b"source","zone_id",b"zone_id"]) -> None: ...
global___TaskRequest = TaskRequest
class TaskRequestResult(google.protobuf.message.Message):
    """Result of a StartTask request: whether a task was started and/or stopped."""
    DESCRIPTOR: google.protobuf.descriptor.Descriptor
    STARTED_FIELD_NUMBER: builtins.int
    STOPPED_FIELD_NUMBER: builtins.int
    started: builtins.bool
    stopped: builtins.bool
    def __init__(self,
        *,
        started: builtins.bool = ...,
        stopped: builtins.bool = ...,
        ) -> None: ...
    def ClearField(self, field_name: typing_extensions.Literal["started",b"started","stopped",b"stopped"]) -> None: ...
global___TaskRequestResult = TaskRequestResult
class Task(google.protobuf.message.Message):
    """A single irrigation task, including its start/finish timestamps."""
    DESCRIPTOR: google.protobuf.descriptor.Descriptor
    SOURCE_FIELD_NUMBER: builtins.int
    ZONE_ID_FIELD_NUMBER: builtins.int
    DURATION_FIELD_NUMBER: builtins.int
    DATETIME_STARTED_FIELD_NUMBER: builtins.int
    DATETIME_FINISHED_FIELD_NUMBER: builtins.int
    source: global___TaskSource.ValueType
    zone_id: builtins.int
    duration: builtins.int
    @property
    def datetime_started(self) -> global___Timestamp: ...
    @property
    def datetime_finished(self) -> global___Timestamp: ...
    def __init__(self,
        *,
        source: global___TaskSource.ValueType = ...,
        zone_id: builtins.int = ...,
        duration: builtins.int = ...,
        datetime_started: typing.Optional[global___Timestamp] = ...,
        datetime_finished: typing.Optional[global___Timestamp] = ...,
        ) -> None: ...
    def HasField(self, field_name: typing_extensions.Literal["datetime_finished",b"datetime_finished","datetime_started",b"datetime_started"]) -> builtins.bool: ...
    def ClearField(self, field_name: typing_extensions.Literal["datetime_finished",b"datetime_finished","datetime_started",b"datetime_started","duration",b"duration","source",b"source","zone_id",b"zone_id"]) -> None: ...
global___Task = Task
class TaskList(google.protobuf.message.Message):
    """A snapshot of tasks together with a `now` timestamp."""
    DESCRIPTOR: google.protobuf.descriptor.Descriptor
    TASKS_FIELD_NUMBER: builtins.int
    NOW_FIELD_NUMBER: builtins.int
    @property
    def tasks(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Task]: ...
    @property
    def now(self) -> global___Timestamp: ...
    def __init__(self,
        *,
        tasks: typing.Optional[typing.Iterable[global___Task]] = ...,
        now: typing.Optional[global___Timestamp] = ...,
        ) -> None: ...
    def HasField(self, field_name: typing_extensions.Literal["now",b"now"]) -> builtins.bool: ...
    def ClearField(self, field_name: typing_extensions.Literal["now",b"now","tasks",b"tasks"]) -> None: ...
global___TaskList = TaskList
class Job(google.protobuf.message.Message):
    """An irrigation job covering one or more zones."""
    DESCRIPTOR: google.protobuf.descriptor.Descriptor
    ID_FIELD_NUMBER: builtins.int
    DATE_FIELD_NUMBER: builtins.int
    DURATION_FIELD_NUMBER: builtins.int
    ZONES_FIELD_NUMBER: builtins.int
    ENABLE_FIELD_NUMBER: builtins.int
    REPEAT_FIELD_NUMBER: builtins.int
    id: builtins.int
    @property
    def date(self) -> global___Timestamp: ...
    duration: builtins.int
    @property
    def zones(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ...
    enable: builtins.bool
    repeat: builtins.bool
    def __init__(self,
        *,
        id: builtins.int = ...,
        date: typing.Optional[global___Timestamp] = ...,
        duration: builtins.int = ...,
        zones: typing.Optional[typing.Iterable[builtins.int]] = ...,
        enable: builtins.bool = ...,
        repeat: builtins.bool = ...,
        ) -> None: ...
    def HasField(self, field_name: typing_extensions.Literal["date",b"date"]) -> builtins.bool: ...
    def ClearField(self, field_name: typing_extensions.Literal["date",b"date","duration",b"duration","enable",b"enable","id",b"id","repeat",b"repeat","zones",b"zones"]) -> None: ...
global___Job = Job
class JobID(google.protobuf.message.Message):
    """Wrapper message carrying a job's ID."""
    DESCRIPTOR: google.protobuf.descriptor.Descriptor
    ID_FIELD_NUMBER: builtins.int
    id: builtins.int
    def __init__(self,
        *,
        id: builtins.int = ...,
        ) -> None: ...
    def ClearField(self, field_name: typing_extensions.Literal["id",b"id"]) -> None: ...
global___JobID = JobID
class JobList(google.protobuf.message.Message):
    """List of stored irrigation jobs (GetJobs response).
    Generated protobuf type stub — do not hand-edit.
    """
    DESCRIPTOR: google.protobuf.descriptor.Descriptor
    JOBS_FIELD_NUMBER: builtins.int
    @property
    def jobs(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Job]: ...
    def __init__(self,
        *,
        jobs: typing.Optional[typing.Iterable[global___Job]] = ...,
        ) -> None: ...
    def ClearField(self, field_name: typing_extensions.Literal["jobs",b"jobs"]) -> None: ...
global___JobList = JobList
class ConfigTime(google.protobuf.message.Message):
    """Date/time plus timezone name, used by the Get/SetConfigTime RPCs.
    Generated protobuf type stub — do not hand-edit.
    """
    DESCRIPTOR: google.protobuf.descriptor.Descriptor
    DATETIME_FIELD_NUMBER: builtins.int
    TIMEZONE_FIELD_NUMBER: builtins.int
    @property
    def datetime(self) -> global___Timestamp: ...
    timezone: typing.Text
    def __init__(self,
        *,
        datetime: typing.Optional[global___Timestamp] = ...,
        timezone: typing.Text = ...,
        ) -> None: ...
    def HasField(self, field_name: typing_extensions.Literal["datetime",b"datetime"]) -> builtins.bool: ...
    def ClearField(self, field_name: typing_extensions.Literal["datetime",b"datetime","timezone",b"timezone"]) -> None: ...
global___ConfigTime = ConfigTime

View file

@ -0,0 +1,513 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2
from tsgrain_controller.grpc_generated import tsgrain_pb2 as tsgrain__controller_dot_grpc__generated_dot_tsgrain__pb2
class TSGRainStub(object):
    """Client-side stub for the TSGRain irrigation-control service.

    One attribute per RPC; each is a callable built from the channel with
    the matching request serializer / response deserializer.
    Generated by the gRPC protocol compiler plugin — do not hand-edit.
    """

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        self.StartTask = channel.unary_unary(
                '/TSGRain/StartTask',
                request_serializer=tsgrain__controller_dot_grpc__generated_dot_tsgrain__pb2.TaskRequest.SerializeToString,
                response_deserializer=tsgrain__controller_dot_grpc__generated_dot_tsgrain__pb2.TaskRequestResult.FromString,
                )
        self.GetTasks = channel.unary_unary(
                '/TSGRain/GetTasks',
                request_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
                response_deserializer=tsgrain__controller_dot_grpc__generated_dot_tsgrain__pb2.TaskList.FromString,
                )
        # Server-streaming RPC: yields a TaskList whenever the queue changes.
        self.StreamTasks = channel.unary_stream(
                '/TSGRain/StreamTasks',
                request_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
                response_deserializer=tsgrain__controller_dot_grpc__generated_dot_tsgrain__pb2.TaskList.FromString,
                )
        self.CreateJob = channel.unary_unary(
                '/TSGRain/CreateJob',
                request_serializer=tsgrain__controller_dot_grpc__generated_dot_tsgrain__pb2.Job.SerializeToString,
                response_deserializer=tsgrain__controller_dot_grpc__generated_dot_tsgrain__pb2.JobID.FromString,
                )
        self.GetJob = channel.unary_unary(
                '/TSGRain/GetJob',
                request_serializer=tsgrain__controller_dot_grpc__generated_dot_tsgrain__pb2.JobID.SerializeToString,
                response_deserializer=tsgrain__controller_dot_grpc__generated_dot_tsgrain__pb2.Job.FromString,
                )
        self.GetJobs = channel.unary_unary(
                '/TSGRain/GetJobs',
                request_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
                response_deserializer=tsgrain__controller_dot_grpc__generated_dot_tsgrain__pb2.JobList.FromString,
                )
        self.UpdateJob = channel.unary_unary(
                '/TSGRain/UpdateJob',
                request_serializer=tsgrain__controller_dot_grpc__generated_dot_tsgrain__pb2.Job.SerializeToString,
                response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
                )
        self.DeleteJob = channel.unary_unary(
                '/TSGRain/DeleteJob',
                request_serializer=tsgrain__controller_dot_grpc__generated_dot_tsgrain__pb2.JobID.SerializeToString,
                response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
                )
        self.GetAutoMode = channel.unary_unary(
                '/TSGRain/GetAutoMode',
                request_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
                response_deserializer=google_dot_protobuf_dot_wrappers__pb2.BoolValue.FromString,
                )
        self.SetAutoMode = channel.unary_unary(
                '/TSGRain/SetAutoMode',
                request_serializer=google_dot_protobuf_dot_wrappers__pb2.BoolValue.SerializeToString,
                response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
                )
        self.GetConfigTime = channel.unary_unary(
                '/TSGRain/GetConfigTime',
                request_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
                response_deserializer=tsgrain__controller_dot_grpc__generated_dot_tsgrain__pb2.ConfigTime.FromString,
                )
        self.SetConfigTime = channel.unary_unary(
                '/TSGRain/SetConfigTime',
                request_serializer=tsgrain__controller_dot_grpc__generated_dot_tsgrain__pb2.ConfigTime.SerializeToString,
                response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
                )
        self.GetDefaultIrrigationTime = channel.unary_unary(
                '/TSGRain/GetDefaultIrrigationTime',
                request_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
                response_deserializer=google_dot_protobuf_dot_wrappers__pb2.Int32Value.FromString,
                )
        self.SetDefaultIrrigationTime = channel.unary_unary(
                '/TSGRain/SetDefaultIrrigationTime',
                request_serializer=google_dot_protobuf_dot_wrappers__pb2.Int32Value.SerializeToString,
                response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
                )
class TSGRainServicer(object):
    """Abstract server-side base class for the TSGRain service.

    Every method answers UNIMPLEMENTED; a concrete servicer overrides the
    RPCs it supports. Generated by the gRPC protocol compiler plugin — do
    not hand-edit. (Docstrings translated from the German .proto comments.)
    """

    def StartTask(self, request, context):
        """Start a new irrigation task (or stop a running one if it is
        already running).
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def GetTasks(self, request, context):
        """Return all irrigation tasks currently in the queue.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def StreamTasks(self, request, context):
        """Stream the current queue of irrigation tasks, i.e. a new
        version is sent whenever it changes.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def CreateJob(self, request, context):
        """Create a new irrigation job.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def GetJob(self, request, context):
        """Return the irrigation job with the given ID.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def GetJobs(self, request, context):
        """Return all stored irrigation jobs.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def UpdateJob(self, request, context):
        """Update an irrigation job.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def DeleteJob(self, request, context):
        """Delete the irrigation job with the given ID.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def GetAutoMode(self, request, context):
        """Return whether automatic mode is enabled.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def SetAutoMode(self, request, context):
        """Enable/disable automatic mode.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def GetConfigTime(self, request, context):
        """Get date/time/timezone.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def SetConfigTime(self, request, context):
        """Set date/time/timezone.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def GetDefaultIrrigationTime(self, request, context):
        """Get the default duration for manual irrigation.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def SetDefaultIrrigationTime(self, request, context):
        """Set the default duration for manual irrigation.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
def add_TSGRainServicer_to_server(servicer, server):
    """Register `servicer`'s RPC handlers for the TSGRain service on `server`.

    Generated by the gRPC protocol compiler plugin — do not hand-edit.
    """
    rpc_method_handlers = {
            'StartTask': grpc.unary_unary_rpc_method_handler(
                    servicer.StartTask,
                    request_deserializer=tsgrain__controller_dot_grpc__generated_dot_tsgrain__pb2.TaskRequest.FromString,
                    response_serializer=tsgrain__controller_dot_grpc__generated_dot_tsgrain__pb2.TaskRequestResult.SerializeToString,
            ),
            'GetTasks': grpc.unary_unary_rpc_method_handler(
                    servicer.GetTasks,
                    request_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
                    response_serializer=tsgrain__controller_dot_grpc__generated_dot_tsgrain__pb2.TaskList.SerializeToString,
            ),
            'StreamTasks': grpc.unary_stream_rpc_method_handler(
                    servicer.StreamTasks,
                    request_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
                    response_serializer=tsgrain__controller_dot_grpc__generated_dot_tsgrain__pb2.TaskList.SerializeToString,
            ),
            'CreateJob': grpc.unary_unary_rpc_method_handler(
                    servicer.CreateJob,
                    request_deserializer=tsgrain__controller_dot_grpc__generated_dot_tsgrain__pb2.Job.FromString,
                    response_serializer=tsgrain__controller_dot_grpc__generated_dot_tsgrain__pb2.JobID.SerializeToString,
            ),
            'GetJob': grpc.unary_unary_rpc_method_handler(
                    servicer.GetJob,
                    request_deserializer=tsgrain__controller_dot_grpc__generated_dot_tsgrain__pb2.JobID.FromString,
                    response_serializer=tsgrain__controller_dot_grpc__generated_dot_tsgrain__pb2.Job.SerializeToString,
            ),
            'GetJobs': grpc.unary_unary_rpc_method_handler(
                    servicer.GetJobs,
                    request_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
                    response_serializer=tsgrain__controller_dot_grpc__generated_dot_tsgrain__pb2.JobList.SerializeToString,
            ),
            'UpdateJob': grpc.unary_unary_rpc_method_handler(
                    servicer.UpdateJob,
                    request_deserializer=tsgrain__controller_dot_grpc__generated_dot_tsgrain__pb2.Job.FromString,
                    response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
            ),
            'DeleteJob': grpc.unary_unary_rpc_method_handler(
                    servicer.DeleteJob,
                    request_deserializer=tsgrain__controller_dot_grpc__generated_dot_tsgrain__pb2.JobID.FromString,
                    response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
            ),
            'GetAutoMode': grpc.unary_unary_rpc_method_handler(
                    servicer.GetAutoMode,
                    request_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
                    response_serializer=google_dot_protobuf_dot_wrappers__pb2.BoolValue.SerializeToString,
            ),
            'SetAutoMode': grpc.unary_unary_rpc_method_handler(
                    servicer.SetAutoMode,
                    request_deserializer=google_dot_protobuf_dot_wrappers__pb2.BoolValue.FromString,
                    response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
            ),
            'GetConfigTime': grpc.unary_unary_rpc_method_handler(
                    servicer.GetConfigTime,
                    request_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
                    response_serializer=tsgrain__controller_dot_grpc__generated_dot_tsgrain__pb2.ConfigTime.SerializeToString,
            ),
            'SetConfigTime': grpc.unary_unary_rpc_method_handler(
                    servicer.SetConfigTime,
                    request_deserializer=tsgrain__controller_dot_grpc__generated_dot_tsgrain__pb2.ConfigTime.FromString,
                    response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
            ),
            'GetDefaultIrrigationTime': grpc.unary_unary_rpc_method_handler(
                    servicer.GetDefaultIrrigationTime,
                    request_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
                    response_serializer=google_dot_protobuf_dot_wrappers__pb2.Int32Value.SerializeToString,
            ),
            'SetDefaultIrrigationTime': grpc.unary_unary_rpc_method_handler(
                    servicer.SetDefaultIrrigationTime,
                    request_deserializer=google_dot_protobuf_dot_wrappers__pb2.Int32Value.FromString,
                    response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
            ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'TSGRain', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class TSGRain(object):
    """Static, channel-less invocation helpers for the TSGRain service
    (one static method per RPC, each opening a call to `target` directly).
    Generated by the gRPC protocol compiler plugin — do not hand-edit.
    """

    @staticmethod
    def StartTask(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/TSGRain/StartTask',
            tsgrain__controller_dot_grpc__generated_dot_tsgrain__pb2.TaskRequest.SerializeToString,
            tsgrain__controller_dot_grpc__generated_dot_tsgrain__pb2.TaskRequestResult.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def GetTasks(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/TSGRain/GetTasks',
            google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
            tsgrain__controller_dot_grpc__generated_dot_tsgrain__pb2.TaskList.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def StreamTasks(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_stream(request, target, '/TSGRain/StreamTasks',
            google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
            tsgrain__controller_dot_grpc__generated_dot_tsgrain__pb2.TaskList.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def CreateJob(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/TSGRain/CreateJob',
            tsgrain__controller_dot_grpc__generated_dot_tsgrain__pb2.Job.SerializeToString,
            tsgrain__controller_dot_grpc__generated_dot_tsgrain__pb2.JobID.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def GetJob(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/TSGRain/GetJob',
            tsgrain__controller_dot_grpc__generated_dot_tsgrain__pb2.JobID.SerializeToString,
            tsgrain__controller_dot_grpc__generated_dot_tsgrain__pb2.Job.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def GetJobs(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/TSGRain/GetJobs',
            google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
            tsgrain__controller_dot_grpc__generated_dot_tsgrain__pb2.JobList.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def UpdateJob(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/TSGRain/UpdateJob',
            tsgrain__controller_dot_grpc__generated_dot_tsgrain__pb2.Job.SerializeToString,
            google_dot_protobuf_dot_empty__pb2.Empty.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def DeleteJob(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/TSGRain/DeleteJob',
            tsgrain__controller_dot_grpc__generated_dot_tsgrain__pb2.JobID.SerializeToString,
            google_dot_protobuf_dot_empty__pb2.Empty.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def GetAutoMode(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/TSGRain/GetAutoMode',
            google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
            google_dot_protobuf_dot_wrappers__pb2.BoolValue.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def SetAutoMode(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/TSGRain/SetAutoMode',
            google_dot_protobuf_dot_wrappers__pb2.BoolValue.SerializeToString,
            google_dot_protobuf_dot_empty__pb2.Empty.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def GetConfigTime(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/TSGRain/GetConfigTime',
            google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
            tsgrain__controller_dot_grpc__generated_dot_tsgrain__pb2.ConfigTime.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def SetConfigTime(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/TSGRain/SetConfigTime',
            tsgrain__controller_dot_grpc__generated_dot_tsgrain__pb2.ConfigTime.SerializeToString,
            google_dot_protobuf_dot_empty__pb2.Empty.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def GetDefaultIrrigationTime(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/TSGRain/GetDefaultIrrigationTime',
            google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
            google_dot_protobuf_dot_wrappers__pb2.Int32Value.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def SetDefaultIrrigationTime(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/TSGRain/SetDefaultIrrigationTime',
            google_dot_protobuf_dot_wrappers__pb2.Int32Value.SerializeToString,
            google_dot_protobuf_dot_empty__pb2.Empty.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

View file

@ -0,0 +1,151 @@
# coding=utf-8
import threading
from concurrent import futures
from typing import Generator
import logging
import grpc
from google.protobuf import empty_pb2, wrappers_pb2
from tsgrain_controller import models, util
from tsgrain_controller.grpc_generated import tsgrain_pb2, tsgrain_pb2_grpc
# NOTE(review): removed leftover debug code that built an unused sample
# TaskRequest (zone_id=2, duration=30) at module import time.
class TSGRainServicer(tsgrain_pb2_grpc.TSGRainServicer):
    """Concrete gRPC servicer for the TSGRain irrigation-control service.

    All operations are delegated to the application core through the
    `models.AppInterface` passed to the constructor.
    """

    def __init__(self, app: models.AppInterface):
        self.app = app
        # Signalled externally when the task queue changes; wakes up
        # StreamTasks so it can push a fresh snapshot to clients.
        self.queue_update = threading.Event()

    def StartTask(self, request: tsgrain_pb2.TaskRequest,
                  context) -> tsgrain_pb2.TaskRequestResult:
        """Start a new irrigation task (or stop a running one).

        Fixed the return annotation: per the generated service definition,
        StartTask responds with a TaskRequestResult, not a Task.
        """
        logging.info('GRPC StartTask: %s', request)
        res = self.app.start_task(
            models.TaskRequest.deserialize_proto(request))
        return res.serialize_proto()

    def _get_tasks(self) -> tsgrain_pb2.TaskList:
        """Build a TaskList snapshot of the queue with the current time."""
        app_tasks = self.app.get_tasks()
        tasks = [task.serialize_proto() for task in app_tasks]
        return tsgrain_pb2.TaskList(now=util.datetime_to_proto(
            util.datetime_now()),
                                    tasks=tasks)

    def GetTasks(self, request: empty_pb2.Empty,
                 context) -> tsgrain_pb2.TaskList:
        """Return all irrigation tasks currently in the queue."""
        logging.info('GRPC GetTasks: %s', request)
        return self._get_tasks()

    def StreamTasks(self, request: empty_pb2.Empty,
                    context) -> Generator[tsgrain_pb2.TaskList, None, None]:
        """Stream the task queue: yield immediately, then a new snapshot
        each time `queue_update` is signalled, until the app shuts down.
        """
        logging.info('GRPC StreamTasks: %s', request)
        yield self._get_tasks()
        self.queue_update.clear()
        while self.app.is_running():
            # 1 s timeout so shutdown is noticed even without queue events.
            if self.queue_update.wait(1):
                yield self._get_tasks()
                self.queue_update.clear()

    def CreateJob(self, request: tsgrain_pb2.Job,
                  context) -> tsgrain_pb2.JobID:
        """Create a new irrigation job and return its assigned ID."""
        logging.info('GRPC CreateJob: %s', request)
        job = models.Job.deserialize_proto(request)
        job_id = self.app.create_job(job)
        return tsgrain_pb2.JobID(id=job_id)

    def GetJob(self, request: tsgrain_pb2.JobID, context) -> tsgrain_pb2.Job:
        """Return the irrigation job with the given ID.

        Responds with NOT_FOUND if no job has that ID.
        """
        logging.info('GRPC GetJob: %s', request)
        try:
            job = self.app.get_job(request.id)
        except KeyError:
            context.set_code(grpc.StatusCode.NOT_FOUND)
            raise
        return job.serialize_proto()

    def GetJobs(self, request: empty_pb2.Empty,
                context) -> tsgrain_pb2.JobList:
        """Return all stored irrigation jobs."""
        logging.info('GRPC GetJobs: %s', request)
        jobs = [job.serialize_proto() for job in self.app.get_jobs()]
        return tsgrain_pb2.JobList(jobs=jobs)

    def UpdateJob(self, request: tsgrain_pb2.Job, context) -> empty_pb2.Empty:
        """Update an existing irrigation job."""
        logging.info('GRPC UpdateJob: %s', request)
        job = models.Job.deserialize_proto(request)
        self.app.update_job(job)
        return empty_pb2.Empty()

    def DeleteJob(self, request: tsgrain_pb2.JobID,
                  context) -> empty_pb2.Empty:
        """Delete the irrigation job with the given ID.

        Responds with NOT_FOUND if no job has that ID.
        """
        logging.info('GRPC DeleteJob: %s', request)
        try:
            self.app.delete_job(request.id)
        except KeyError:
            context.set_code(grpc.StatusCode.NOT_FOUND)
            raise
        return empty_pb2.Empty()

    def GetAutoMode(self, request: empty_pb2.Empty,
                    context) -> wrappers_pb2.BoolValue:
        """Return whether automatic mode is enabled."""
        logging.info('GRPC GetAutoMode: %s', request)
        return wrappers_pb2.BoolValue(value=self.app.get_auto_mode())

    def SetAutoMode(self, request: wrappers_pb2.BoolValue,
                    context) -> empty_pb2.Empty:
        """Enable/disable automatic mode."""
        logging.info('GRPC SetAutoMode: %s', request)
        self.app.set_auto_mode(request.value)
        return empty_pb2.Empty()

    def GetConfigTime(self, request: empty_pb2.Empty,
                      context) -> tsgrain_pb2.ConfigTime:
        """Get date/time/timezone (not implemented yet)."""
        logging.info('GRPC GetConfigTime: %s', request)
        # TODO: system time config
        # return tsgrain_pb2.ConfigTime(
        #     datetime=util.datetime_to_proto(util.datetime_now()),
        #     timezone=
        # )
        # Bug fix: previously delegated to super().SetConfigTime(); call
        # the matching base handler so UNIMPLEMENTED is raised for the
        # correct RPC.
        return super().GetConfigTime(request, context)

    def SetConfigTime(self, request: tsgrain_pb2.ConfigTime,
                      context) -> empty_pb2.Empty:
        """Set date/time/timezone (not implemented yet)."""
        logging.info('GRPC SetConfigTime: %s', request)
        return super().SetConfigTime(request, context)

    def GetDefaultIrrigationTime(self, request: empty_pb2.Empty,
                                 context) -> wrappers_pb2.Int32Value:
        """Get the default duration used for manual irrigation."""
        logging.info('GRPC GetDefaultIrrigationTime: %s', request)
        return wrappers_pb2.Int32Value(value=self.app.get_cfg().manual_time)

    def SetDefaultIrrigationTime(self, request: wrappers_pb2.Int32Value,
                                 context) -> empty_pb2.Empty:
        """Set the default duration used for manual irrigation."""
        logging.info('GRPC SetDefaultIrrigationTime: %s', request)
        self.app.get_cfg().manual_time = request.value
        return empty_pb2.Empty()
def new_server(servicer: TSGRainServicer, port: str) -> grpc.Server:
    """Build a gRPC server hosting `servicer` on the given insecure port.

    The server is returned unstarted; the caller starts and stops it.
    """
    executor = futures.ThreadPoolExecutor(max_workers=2)
    srv = grpc.server(executor)
    tsgrain_pb2_grpc.add_TSGRainServicer_to_server(servicer, srv)
    srv.add_insecure_port(port)
    return srv

View file

@ -1,14 +1,25 @@
# coding=utf-8 # coding=utf-8
from typing import Callable, Optional from typing import Callable, Optional
from tsgrain_controller import util
class Io: class Io:
def __init__(self):
self.cb_manual: Optional[Callable[[int], None]] = None
self.cb_mode: Optional[Callable[[], None]] = None
def set_callbacks(self, cb_manual: Optional[Callable[[int], None]], def set_callbacks(self, cb_manual: Optional[Callable[[int], None]],
cb_mode: Optional[Callable[[], None]]): cb_mode: Optional[Callable[[], None]]):
pass self.cb_manual = cb_manual
self.cb_mode = cb_mode
def _trigger_cb_manual(self, zone_id: int):
if self.cb_manual is not None:
self.cb_manual(zone_id)
def _trigger_cb_mode(self):
if self.cb_mode is not None:
self.cb_mode()
def start(self): def start(self):
pass pass
@ -18,15 +29,3 @@ class Io:
def write_output(self, key: str, val: bool): def write_output(self, key: str, val: bool):
pass pass
def new_io(app: util.AppInterface, console_flag: bool) -> Io:
if not console_flag:
try:
from tsgrain_controller.io import mcp23017 as io_mod
except ImportError:
from tsgrain_controller.io import console as io_mod
else:
from tsgrain_controller.io import console as io_mod
return io_mod.Io(app)

View file

@ -2,9 +2,9 @@
import curses import curses
import logging import logging
import math import math
from typing import Callable, Dict, Optional from typing import Dict, Optional
from tsgrain_controller import io, util from tsgrain_controller import io, util, models
class CursesHandler(logging.Handler): class CursesHandler(logging.Handler):
@ -28,25 +28,18 @@ class CursesHandler(logging.Handler):
except (KeyboardInterrupt, SystemExit): except (KeyboardInterrupt, SystemExit):
raise raise
except: # noqa: E722 except: # pylint: disable=bare-except
self.handleError(record) self.handleError(record)
class Io(util.StoppableThread, io.Io): class Io(util.StoppableThread, io.Io):
def __init__(self, app: util.AppInterface): def __init__(self, app: models.AppInterface):
super().__init__(0.01) super().__init__(0.01)
self.app = app self.app = app
self.cb_manual: Optional[Callable[[int], None]] = None
self.cb_mode: Optional[Callable[[], None]] = None
self._screen: Optional = None self._screen: Optional = None
self._outputs: Dict[str, bool] = dict() self._outputs: Dict[str, bool] = {}
def set_callbacks(self, cb_manual: Optional[Callable[[int], None]],
cb_mode: Optional[Callable[[], None]]):
self.cb_manual = cb_manual
self.cb_mode = cb_mode
def setup(self): def setup(self):
screen = curses.initscr() screen = curses.initscr()
@ -68,16 +61,8 @@ class Io(util.StoppableThread, io.Io):
self._screen.nodelay(True) self._screen.nodelay(True)
logging.info('Window size: max_x=%d, max_y=%d, width_half=%d', max_x, logging.debug('Window size: max_x=%d, max_y=%d, width_half=%d', max_x,
max_y, width_half) max_y, width_half)
def _trigger_cb_manual(self, zone_id: int):
if self.cb_manual is not None:
self.cb_manual(zone_id)
def _trigger_cb_mode(self):
if self.cb_mode is not None:
self.cb_mode()
def write_output(self, key: str, val: bool): def write_output(self, key: str, val: bool):
self._outputs[key] = val self._outputs[key] = val
@ -88,8 +73,7 @@ class Io(util.StoppableThread, io.Io):
def state_str(state: bool) -> str: def state_str(state: bool) -> str:
if state: if state:
return '' return ''
else: return ''
return ''
# Mode key (0) # Mode key (0)
if c == 48: if c == 48:
@ -104,7 +88,7 @@ class Io(util.StoppableThread, io.Io):
i = 1 i = 1
for key, output in self._outputs.items(): for key, output in self._outputs.items():
self._screen.addstr(i, 0, '%s: %s' % (key, state_str(output))) self._screen.addstr(i, 0, f'{key}: {state_str(output)}')
i += 1 i += 1
def cleanup(self): def cleanup(self):

View file

@ -0,0 +1,15 @@
# coding=utf-8
from tsgrain_controller import models, io
# pylint: disable=import-outside-toplevel
def new_io(app: models.AppInterface, console_flag: bool) -> io.Io:
if not console_flag:
try:
from tsgrain_controller.io import mcp23017 as io_mod
except ImportError:
from tsgrain_controller.io import console as io_mod
else:
from tsgrain_controller.io import console as io_mod
return io_mod.Io(app)

View file

@ -5,10 +5,10 @@ from dataclasses import dataclass
from enum import Enum from enum import Enum
from typing import Callable, Dict, Optional, Tuple from typing import Callable, Dict, Optional, Tuple
import RPi.GPIO as GPIO from RPi import GPIO # pylint: disable=import-error
import smbus import smbus
from tsgrain_controller import io, util from tsgrain_controller import io, models
# MCP-Register # MCP-Register
# Quelle: https://ww1.microchip.com/downloads/en/devicedoc/20001952c.pdf # Quelle: https://ww1.microchip.com/downloads/en/devicedoc/20001952c.pdf
@ -193,7 +193,7 @@ die wiederum die als Output konfigurierten Pins ansteuern.
class PinConfigInvalid(Exception): class PinConfigInvalid(Exception):
def __init__(self, cfg_str: str): def __init__(self, cfg_str: str):
super().__init__('MCP23017 pin config %s invalid' % cfg_str) super().__init__(f'MCP23017 pin config {cfg_str} invalid')
class _MCP23017Port(Enum): class _MCP23017Port(Enum):
@ -223,18 +223,18 @@ class _MCP23017Device:
try: try:
i2c_addr = int(i2c_addr_str, 16) i2c_addr = int(i2c_addr_str, 16)
except ValueError: except ValueError as e:
raise PinConfigInvalid(cfg_str) raise PinConfigInvalid(cfg_str) from e
try: try:
port = _MCP23017Port[port_str] port = _MCP23017Port[port_str]
except KeyError: except KeyError as e:
raise PinConfigInvalid(cfg_str) raise PinConfigInvalid(cfg_str) from e
try: try:
pin = int(pin_str) pin = int(pin_str)
except ValueError: except ValueError as e:
raise PinConfigInvalid(cfg_str) raise PinConfigInvalid(cfg_str) from e
invert = False invert = False
@ -248,18 +248,20 @@ class _MCP23017Device:
class Io(io.Io): class Io(io.Io):
def __init__(self, app: util.AppInterface): def __init__(self, app: models.AppInterface):
super().__init__()
self.cb_manual: Optional[Callable[[int], None]] = None self.cb_manual: Optional[Callable[[int], None]] = None
self.cb_mode: Optional[Callable[[], None]] = None self.cb_mode: Optional[Callable[[], None]] = None
self.cfg = app.get_cfg() self.cfg = app.get_cfg()
self.bus = smbus.SMBus(self.cfg.i2c_bus_id) self.bus = smbus.SMBus(self.cfg.i2c_bus_id)
self.i2c_cache: Dict[Tuple[int, int], int] = dict() self.i2c_cache: Dict[Tuple[int, int], int] = {}
self.mcp_addresses = set() self.mcp_addresses = set()
self.output_devices: Dict[str, _MCP23017Device] = dict() self.output_devices: Dict[str, _MCP23017Device] = {}
self.input_devices: Dict[str, _MCP23017Device] = dict() self.input_devices: Dict[str, _MCP23017Device] = {}
# Parse config # Parse config
for key, cfg_str in self.cfg.input_devices.items(): for key, cfg_str in self.cfg.input_devices.items():
@ -272,19 +274,6 @@ class Io(io.Io):
self.mcp_addresses.add(device.i2c_address) self.mcp_addresses.add(device.i2c_address)
self.output_devices[key] = device self.output_devices[key] = device
def _trigger_cb_manual(self, zone_id: int):
if self.cb_manual is not None:
self.cb_manual(zone_id)
def _trigger_cb_mode(self):
if self.cb_mode is not None:
self.cb_mode()
def set_callbacks(self, cb_manual: Optional[Callable[[int], None]],
cb_mode: Optional[Callable[[], None]]):
self.cb_manual = cb_manual
self.cb_mode = cb_mode
def _i2c_read_byte(self, def _i2c_read_byte(self,
i2c_address: int, i2c_address: int,
register: int, register: int,
@ -326,7 +315,7 @@ class Io(io.Io):
self._i2c_write_byte(i2c_address, register, data) self._i2c_write_byte(i2c_address, register, data)
def _i2c_clear_cache(self): def _i2c_clear_cache(self):
self.i2c_cache = dict() self.i2c_cache = {}
def _configure_mcp(self, i2c_address: int): def _configure_mcp(self, i2c_address: int):
self._i2c_write_byte(i2c_address, MCP_IODIRA, 0xff) self._i2c_write_byte(i2c_address, MCP_IODIRA, 0xff)
@ -375,7 +364,7 @@ class Io(io.Io):
return None return None
def _read_inputs(self) -> Dict[str, bool]: def _read_inputs(self) -> Dict[str, bool]:
res = dict() res = {}
self._i2c_clear_cache() self._i2c_clear_cache()
for key, device in self.input_devices.items(): for key, device in self.input_devices.items():
@ -385,7 +374,7 @@ class Io(io.Io):
return res return res
def _interrupt_handler(self, int_pin: int): def _interrupt_handler(self, int_pin: int): # pylint: disable=unused-argument
key = self._read_interrupt() key = self._read_interrupt()
if key is None: if key is None:
return return

View file

@ -4,25 +4,26 @@ from typing import Optional
import schedule import schedule
from tsgrain_controller import database, models, queue, util from tsgrain_controller import models, util
class Scheduler(util.StoppableThread): class Scheduler(util.StoppableThread):
def __init__(self, db: database.RainDB, task_queue: queue.TaskQueue): def __init__(self, app: models.AppInterface):
super().__init__(1) super().__init__(1)
self.db = db self.app = app
self.queue = task_queue
self._job: Optional[schedule.Job] = None self._job: Optional[schedule.Job] = None
def _minute_handler(self): def _minute_handler(self):
jobs = self.db.get_jobs() jobs = self.app.get_jobs()
for job in jobs.values(): for job in jobs:
if job.check(datetime.now()): if job.check(datetime.now()):
for zone in job.zones: for zone in job.zones:
task = models.Task(models.Source.SCHEDULE, zone, self.app.start_task(
job.duration) models.TaskRequest(source=models.Source.SCHEDULE,
self.queue.enqueue(task) zone_id=zone,
queuing=True,
cancelling=False))
def start(self): def start(self):
self._job = schedule.every().minute.at(':00').do(self._minute_handler) self._job = schedule.every().minute.at(':00').do(self._minute_handler)

View file

@ -1,23 +1,46 @@
# coding=utf-8 # coding=utf-8
import logging
import uuid import uuid
from dataclasses import dataclass from dataclasses import dataclass
from datetime import datetime, timedelta from datetime import datetime, timedelta
from enum import Enum from enum import Enum
from typing import List, Optional, Dict, Any from typing import List, Optional, Dict, Any
from tsgrain_controller import util from tsgrain_controller import util, config
from tsgrain_controller.grpc_generated import tsgrain_pb2
@dataclass @dataclass
class Job: class Job:
"""Bewässerungsjob (Zeitsteuerung)"""
date: datetime date: datetime
"""Startdatum/Uhrzeit"""
duration: int duration: int
"""Bewässerungsdauer in Sekunden"""
zones: List[int] zones: List[int]
"""Liste der Zonen, die bewässert werden sollen"""
enable: bool enable: bool
"""Bewässerungsjob aktiviert"""
repeat: bool repeat: bool
"""Bewässerungsjob täglich wiederholen"""
id: int = 0
"""
ID, mit der der Bewässerungsjob in der Datenbank gespeichert wird.
Ist Null bei neu erstellten Objekten, die sich noch nicht in der Datenbank
befinden.
"""
@property @property
def is_active(self) -> bool: def is_active(self) -> bool:
"""
Gibt True zurück, wenn der Bewässerungsjob in Zukunft ausgeführt werden wird.
"""
if not self.enable: if not self.enable:
return False return False
if self.repeat: if self.repeat:
@ -26,6 +49,12 @@ class Job:
return self.date > util.datetime_now() return self.date > util.datetime_now()
def check(self, date_now: datetime) -> bool: def check(self, date_now: datetime) -> bool:
"""
Gibt True zurück, wenn der Bewässerungsjob in dieser Minute
gestartet werden soll.
:param date_now: Aktuelles Datum/Uhrzeit
"""
if not self.enable: if not self.enable:
return False return False
@ -37,24 +66,59 @@ class Job:
return date_now - check_datetime < timedelta(minutes=1) return date_now - check_datetime < timedelta(minutes=1)
def serialize(self) -> Dict[str, Any]:
return {
'date': self.date,
'duration': self.duration,
'zones': self.zones,
'enable': self.enable,
'repeat': self.repeat,
}
@classmethod @classmethod
def deserialize(cls, data: dict) -> 'Job': def deserialize(cls, data: dict, job_id: int = 0) -> 'Job':
return cls( return cls(
date=datetime.fromisoformat(data['date']), date=util.datetime_deserialize(data['date']),
duration=data['duration'], duration=data['duration'],
zones=data['zones'], zones=data['zones'],
enable=data['zones'], enable=data['enable'],
repeat=data['repeat'], repeat=data['repeat'],
id=job_id,
)
def serialize_proto(self) -> tsgrain_pb2.Job:
logging.info(self)
return tsgrain_pb2.Job(id=self.id,
date=util.datetime_to_proto(self.date),
duration=self.duration,
zones=self.zones,
enable=self.enable,
repeat=self.repeat)
@classmethod
def deserialize_proto(cls, data: tsgrain_pb2.Job) -> 'Job':
return cls(
date=util.datetime_from_proto(data.date),
duration=data.duration,
zones=list(data.zones),
enable=data.enable,
repeat=data.repeat,
id=data.id,
) )
class Source(Enum): class Source(Enum):
MANUAL = 2 """Quelle einer Bewässerungsaufgabe"""
MANUAL = 0
SCHEDULE = 1 SCHEDULE = 1
@dataclass @dataclass
class Task: class Task:
"""Bewässerungsaufgabe"""
source: Source source: Source
"""Quelle des Tasks (Manuell/Zeitplan)""" """Quelle des Tasks (Manuell/Zeitplan)"""
@ -145,6 +209,14 @@ class Task:
'datetime_finished': self.datetime_finished 'datetime_finished': self.datetime_finished
} }
def serialize_proto(self) -> tsgrain_pb2.Task:
return tsgrain_pb2.Task(
source=self.source.value,
zone_id=self.zone_id,
duration=self.duration,
datetime_started=util.datetime_to_proto(self.datetime_started),
datetime_finished=util.datetime_to_proto(self.datetime_finished))
@classmethod @classmethod
def deserialize(cls, data: dict) -> 'Task': def deserialize(cls, data: dict) -> 'Task':
task = cls(source=Source[data['source']], task = cls(source=Source[data['source']],
@ -160,5 +232,134 @@ class Task:
return hash(self._id) return hash(self._id)
def __str__(self): def __str__(self):
return 'ZONE %d: %ds (%s)' % (self.zone_id, self.duration, return f'ZONE {self.zone_id}: {self.duration} {self.source}'
self.source.name)
@dataclass
class TaskRequest:
"""Enthält alle Parameter einer zu erstellenden Bewässerungsaufgabe"""
source: Source
"""Quelle des Tasks (Manuell/Zeitplan)"""
zone_id: int
"""Nummer der Zone"""
duration: int = 0
"""Beregnungsdauer in Sekunden"""
queuing: bool = False
"""
Task in die Warteschlange einreihen, wenn er nicht sofort gestartet werden kann.
"""
cancelling: bool = False
"""Task aus der Warteschlange entfernen/abbrechen, wenn er bereits exisitiert."""
@classmethod
def deserialize_proto(cls, data: tsgrain_pb2.TaskRequest) -> 'TaskRequest':
return cls(source=Source(data.source),
zone_id=data.zone_id,
duration=data.duration,
queuing=data.queuing,
cancelling=data.cancelling)
@dataclass
class TaskRequestResult:
"""
Wird beim Versuch, eine neue Bewässerungsaufgabe zu starten, zurückgegeben.
Enthält die Information, ob eine Aufgabe gestartet oder gestoppt wurde.
"""
started: bool
"""True wenn eine neue Aufgabe gestartet wurde"""
stopped: bool
"""True wenn eine laufende Aufgabe gestoppt wurde"""
def serialize_proto(self) -> tsgrain_pb2.TaskRequestResult:
return tsgrain_pb2.TaskRequestResult(started=self.started,
stopped=self.stopped)
class AppInterface:
"""Beinhaltet sämtliche Methoden der TSGRain-Anwendung"""
def get_auto_mode(self) -> bool:
"""Gibt den Status des Automatikmodus zurück."""
def set_auto_mode(self, state: bool):
"""Aktiviert/deaktiviert den Automatikmodus."""
def get_cfg(self) -> config.Config:
"""Gibt das Konfigurationsobjekt der Anwendung zurück."""
def get_logger(self) -> logging.Logger:
"""Gibt den Logger der Anwendung zurück."""
def start_task(self, request: TaskRequest) -> TaskRequestResult:
"""
Starte eine neue Bewässerungsaufgabe (oder stoppe eine laufende, wenn
diese bereits läuft).
:param request: Objekt, dass die Parameter der neuen Aufgabe enthält.
:return: Statusobjekt (Information, ob eine Aufgabe gestartet oder gestoppt
wurde).
"""
def get_tasks(self) -> List[Task]:
"""
Gibt sämtliche in der Warteschlange befindlichen Bewässerungsaufgaben zurück.
:return: Liste von Bewässerungsaufgaben
"""
def create_job(self, job: Job) -> int:
"""
Erstelle einen neuen Bewässerungsjob.
:param job: Bewässerungsjob
:return: ID des neuen Bewässerungsjobs
"""
def get_job(self, job_id: int) -> Job:
"""
Gibt den Bewässerungsjob mit der gegebenen ID zurück.
:param job_id: ID des Bewässerungsjobs
:return: Bewässerungsjob
:raise KeyError: wenn Job nicht gefunden
"""
def get_jobs(self) -> List[Job]:
"""
Gibt alle gespeicherten Bewässerungsjobs zurück.
:return: Bewässerungsjobs: dict(id -> models.Job)
"""
def update_job(self, job: Job):
"""
Aktualisiert einen Bewässerungsjob.
:param job: Bewässerungsjob
"""
def delete_job(self, job_id):
"""
Lösche den Bewässerungsjob mit der gegebenen ID.
:param job_id: ID des Bewässerungsjobs
:raise KeyError: wenn Job nicht gefunden
"""
def notify_queue_update(self):
"""
Wird aufgerufen, wenn die Warteschlange aktualisiert wird.
Dient dazu, andere Komponenten der Anwendung zu benachrichtigen
(momentan den GRPC-Server).
"""
def is_running(self) -> bool:
"""Gibt den Status der Anwendung zurück"""

View file

@ -1,6 +1,6 @@
from dataclasses import dataclass from dataclasses import dataclass
from typing import Dict from typing import Dict
from tsgrain_controller import io, util, queue, models from tsgrain_controller import io, util, task_queue, models
@dataclass @dataclass
@ -22,7 +22,7 @@ class OutputState:
return 'OFF' return 'OFF'
if self.blink_freq == 0: if self.blink_freq == 0:
return 'ON' return 'ON'
return '((%d))' % self.blink_freq return f'(({self.blink_freq}))'
class OutputDevice: class OutputDevice:
@ -51,8 +51,8 @@ class Outputs(util.StoppableThread):
(Ventilausgänge und LEDs) (Ventilausgänge und LEDs)
""" """
def __init__(self, o_io: io.Io, task_holder: queue.TaskHolder, def __init__(self, o_io: io.Io, task_holder: task_queue.TaskHolder,
app: util.AppInterface): app: models.AppInterface):
super().__init__(0.01) super().__init__(0.01)
self.task_holder = task_holder self.task_holder = task_holder
@ -60,11 +60,11 @@ class Outputs(util.StoppableThread):
self.n_zones = self.app.get_cfg().n_zones self.n_zones = self.app.get_cfg().n_zones
self.valve_outputs: Dict[int, OutputDevice] = { self.valve_outputs: Dict[int, OutputDevice] = {
i: OutputDevice('VALVE_%d' % i, o_io) i: OutputDevice(f'VALVE_{i}', o_io)
for i in range(1, self.n_zones + 1) for i in range(1, self.n_zones + 1)
} }
self.zone_leds: Dict[int, OutputDevice] = { self.zone_leds: Dict[int, OutputDevice] = {
i: OutputDevice('LED_Z_%d' % i, o_io) i: OutputDevice(f'LED_Z_{i}', o_io)
for i in range(1, self.n_zones + 1) for i in range(1, self.n_zones + 1)
} }
@ -137,10 +137,10 @@ class Outputs(util.StoppableThread):
if task is not None: if task is not None:
self._set_zone_time(task.zone_id, task.remaining) self._set_zone_time(task.zone_id, task.remaining)
self._set_mode_led_manual(task.source == models.Source.MANUAL) self._set_mode_led_manual(task.source == models.Source.MANUAL)
self._set_mode_led_auto(self.app.is_auto_enabled(), self._set_mode_led_auto(self.app.get_auto_mode(),
task.source == models.Source.SCHEDULE) task.source == models.Source.SCHEDULE)
else: else:
self._set_mode_led_auto(self.app.is_auto_enabled(), False) self._set_mode_led_auto(self.app.get_auto_mode(), False)
self._update_states() self._update_states()

View file

@ -13,11 +13,11 @@ class TaskHolder:
class TaskQueue(util.StoppableThread, TaskHolder): class TaskQueue(util.StoppableThread, TaskHolder):
def __init__(self, app: util.AppInterface): def __init__(self, app: models.AppInterface):
super().__init__(0.1) super().__init__(0.1)
self.app = app self.app = app
self.tasks: List[models.Task] = list() self.tasks: List[models.Task] = []
self.running_task: Optional[models.Task] = None self.running_task: Optional[models.Task] = None
def enqueue(self, task: models.Task, exclusive: bool = False) -> bool: def enqueue(self, task: models.Task, exclusive: bool = False) -> bool:
@ -52,6 +52,8 @@ class TaskQueue(util.StoppableThread, TaskHolder):
if self.running_task == task: if self.running_task == task:
self.running_task = None self.running_task = None
self.app.notify_queue_update()
def cancel_current_task(self): def cancel_current_task(self):
""" """
Bricht den aktuell laufenden Task ab Bricht den aktuell laufenden Task ab
@ -61,6 +63,7 @@ class TaskQueue(util.StoppableThread, TaskHolder):
self.tasks.remove(self.running_task) self.tasks.remove(self.running_task)
logging.info('Running task cancelled (%s)', self.running_task) logging.info('Running task cancelled (%s)', self.running_task)
self.running_task = None self.running_task = None
self.app.notify_queue_update()
def serialize(self) -> List[models.Task]: def serialize(self) -> List[models.Task]:
"""Task zur Speicherung in der Datenbank in dict umwandeln.""" """Task zur Speicherung in der Datenbank in dict umwandeln."""
@ -75,27 +78,30 @@ class TaskQueue(util.StoppableThread, TaskHolder):
if self.running_task is None: if self.running_task is None:
for task in self.tasks: for task in self.tasks:
# Only start scheduled tasks if automatic mode is enabled # Only start scheduled tasks if automatic mode is enabled
if task.source == models.Source.SCHEDULE and not self.app.is_auto_enabled( if task.source == models.Source.SCHEDULE and not self.app.get_auto_mode(
): ):
continue continue
self.running_task = task self.running_task = task
self.running_task.start() self.running_task.start()
logging.info('Queued task started (%s)', self.running_task) logging.info('Queued task started (%s)', self.running_task)
self.app.notify_queue_update()
break break
# Check currently running task # Check currently running task
if self.running_task is not None: if self.running_task is not None:
# Stop scheduled tasks if auto mode is disabled # Stop scheduled tasks if auto mode is disabled
if self.running_task.source == models.Source.SCHEDULE and not self.app.is_auto_enabled( if self.running_task.source == models.Source.SCHEDULE and not self.app.get_auto_mode(
): ):
self.running_task.stop() self.running_task.stop()
logging.info('Running task stopped (%s)', self.running_task) logging.info('Running task stopped (%s)', self.running_task)
self.running_task = None self.running_task = None
self.app.notify_queue_update()
elif self.running_task.is_done: elif self.running_task.is_done:
self.tasks.remove(self.running_task) self.tasks.remove(self.running_task)
logging.info('Running task done (%s)', self.running_task) logging.info('Running task done (%s)', self.running_task)
self.running_task = None self.running_task = None
self.app.notify_queue_update()
def cleanup(self): def cleanup(self):
if self.running_task: if self.running_task:

View file

@ -1,13 +1,13 @@
# coding=utf-8 # coding=utf-8
import datetime from datetime import date, datetime
import json import json
import logging
import threading import threading
import time import time
import math
from enum import Enum from enum import Enum
from typing import Any, Union from typing import Any, Union, Optional
from tsgrain_controller import config from tsgrain_controller.grpc_generated import tsgrain_pb2
def _get_np_attrs(o) -> dict: def _get_np_attrs(o) -> dict:
@ -29,13 +29,13 @@ def serializer(o: Any) -> Union[str, dict, int, float, bool]:
""" """
if hasattr(o, 'serialize'): if hasattr(o, 'serialize'):
return o.serialize() return o.serialize()
elif isinstance(o, datetime.datetime) or isinstance(o, datetime.date): if isinstance(o, (datetime, date)):
return o.isoformat() return o.isoformat()
elif isinstance(o, Enum): if isinstance(o, Enum):
return o.value return o.value
elif isinstance(o, int) or isinstance(o, float) or isinstance(o, bool): if isinstance(o, (bool, float, int)):
return o return o
elif hasattr(o, '__dict__'): if hasattr(o, '__dict__'):
return _get_np_attrs(o) return _get_np_attrs(o)
return str(o) return str(o)
@ -71,19 +71,47 @@ def time_ms() -> int:
return round(time.time() * 1000) return round(time.time() * 1000)
def datetime_now() -> datetime.datetime: def datetime_now() -> datetime:
return datetime.datetime.now() return datetime.now()
# pylint: disable-next=too-many-arguments
def datetime_new(year, def datetime_new(year,
month=None, month=None,
day=None, day=None,
hour=0, hour=0,
minute=0, minute=0,
second=0, second=0,
microsecond=0) -> datetime.datetime: microsecond=0) -> datetime:
return datetime.datetime(year, month, day, hour, minute, second, return datetime(year, month, day, hour, minute, second, microsecond)
microsecond)
def datetime_to_proto(
date_time: Optional[datetime]) -> Optional[tsgrain_pb2.Timestamp]:
"""Konvertiert ein Python datetime-Object in einen protobuf-Zeitstempel"""
if date_time is None:
return None
return tsgrain_pb2.Timestamp(seconds=math.floor(date_time.timestamp()))
def datetime_from_proto(
timestamp: Optional[tsgrain_pb2.Timestamp]) -> Optional[datetime]:
"""Konvertiert einen protobuf-Zeitstempel in ein Python datetime-Objekt"""
if timestamp is None:
return None
return datetime.fromtimestamp(timestamp.seconds)
def datetime_deserialize(date_time: Union[datetime, str]) -> datetime:
"""
Deserialisiert ein datetime-Objekt, falls es im String-Format (YYYY-MM-DDThh:mm:ss)
vorliegt.
"""
if isinstance(date_time, str):
return datetime.fromisoformat(date_time)
return date_time
class StoppableThread(threading.Thread): class StoppableThread(threading.Thread):
@ -116,18 +144,6 @@ class StoppableThread(threading.Thread):
self.join() self.join()
class AppInterface:
def is_auto_enabled(self) -> bool:
pass
def get_cfg(self) -> config.Config:
pass
def get_logger(self) -> logging.Logger:
pass
class ZoneUnavailableException(Exception): class ZoneUnavailableException(Exception):
pass pass