Developing new version, more advanced and more flexible
This commit is contained in:
parent 21f6e58e2c
commit df9baaa9a0
162 .archive/old_version/.gitignore (vendored, new file)
@@ -0,0 +1,162 @@
# ---> Python
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
.pybuilder/
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock

# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/#use-with-ide
.pdm.toml

# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# pytype static type analyzer
.pytype/

# Cython debug symbols
cython_debug/

# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
@@ -9,7 +9,8 @@ from typing import List, Tuple
 import pyodbc
 import snap7
 from dotenv import load_dotenv
-from snap7.util.getters import get_lreal, get_bool
+from snap7.util.getters import get_bool, get_lreal
+
 load_dotenv()
 # Determine the correct driver based on OS
 SQL_DRIVER = (
20 .archive/old_version/pyS7_test.py (new file)
@@ -0,0 +1,20 @@
import pprint
import time

from pyS7 import S7Client

LOOPS = 3
counter = 0
client = S7Client(address="172.16.3.231", rack=0, slot=2)
while counter < LOOPS:
    try:
        client.connect()
        tags = ["DB9,DBD0"]
        data = client.read(tags=tags)
        print(data)
    except Exception as e:
        pprint.pprint(e)
    finally:
        client.disconnect()
        counter += 1
        time.sleep(1)
@@ -1,14 +1,16 @@
-import time
-import snap7
-from snap7.util.getters import get_lreal, get_dint
+import pprint
+import time
+
+import snap7
+from snap7.util.getters import get_dint, get_lreal
 
 LOOPS = 20
 counter = 0
 while counter < LOOPS:
     try:
         plc = snap7.client.Client()
-        plc.connect("172.16.3.231", 0, 2)
+        plc.connect("172.16.3.231", 0, 2)  # fagor 6
+        plc.connect("172.16.3.230", 0, 2)  # fagor 5
         # pprint.pprint(plc.get_cpu_state())
         data = plc.db_read(9, 0, 4)  # Define range of bytes to read
         energy_value = get_dint(data, 0)  # Read energy value
@@ -1,9 +0,0 @@
-# Database credentials
-DB_SERVER=
-DB_NAME=
-DB_USER=
-DB_PASSWORD=
-
-# Seq logging credentials
-SEQ_URL=
-SEQ_API_KEY=
161 .gitignore (vendored)
@@ -1,162 +1,11 @@
# ---> Python
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
.pybuilder/
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock

# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/#use-with-ide
.pdm.toml

# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
*.class
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# pytype static type analyzer
.pytype/

# Cython debug symbols
cython_debug/

# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/

*.log
.pytest_cache/
.coverage
config.ini
0 chat-memory.md (new file)
131 classes.py
@@ -1,131 +0,0 @@
import os
import platform
import socket
from datetime import datetime
from typing import Optional

import pyodbc
import snap7
from dotenv import load_dotenv

load_dotenv()


# Entire scheduler config class
class SchedulerConfig:
    interval: int
    next_read: datetime


# PLC properties class (energy, air)
class Plc:
    id: int
    name: str
    ip: str
    db_number: int  # Where the data is stored in the PLC
    is_enabled: bool  # True if PLC will be read from
    air_db_offset: int
    energy_db_offset: int
    state_db_offset: int
    location: str  # H1 or H2
    last_energy_read: float
    last_air_read: float
    last_state_read: bool
    last_read_timestamp: datetime

    def __init__(self, id: int, name: str, ip: str, db_number: int,
                 air_db_offset: int, energy_db_offset: int,
                 state_db_offset: int, location: str) -> None:
        self.id = id
        self.name = name
        self.ip = ip
        self.db_number = db_number
        self.is_enabled = True  # default value
        self.air_db_offset = air_db_offset
        self.energy_db_offset = energy_db_offset
        self.state_db_offset = state_db_offset
        self.location = location
        self.last_energy_read = 0.0
        self.last_air_read = 0.0
        self.last_state_read = False
        self.last_read_timestamp = None

    def check_connection_snap(self) -> bool:
        """Check if the PLC is reachable."""
        try:
            client = snap7.client.Client()
            client.connect(self.ip, 0, 1)
            client.disconnect()
            return True
        except Exception as e:
            # logger.error(f"❌ Error checking PLC connection: {e}")
            return False

    def check_connection_socket(self) -> bool:
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
            sock.settimeout(1)
            return sock.connect_ex((self.ip, 102)) == 0


# PAC properties class (energy)
class Pac:
    id: int
    name: str
    ip: str
    port: int
    is_enabled: bool
    location: str  # H1 or H2
    last_energy_read: float
    last_read_timestamp: datetime


class Database:
    # Private class attributes (internal use)
    _sql_driver = (
        "ODBC Driver 18 for SQL Server"
        if platform.system() == "Linux"
        else "SQL Server"
    )
    _instance: Optional["Database"] = None
    _connection = None

    def __new__(cls):
        if cls._instance is None:
            cls._instance = super(Database, cls).__new__(cls)
        return cls._instance

    # Public methods (external interface)
    def __init__(self):
        if self._connection is None:
            self._connection = self._create_connection()

    def execute_query(self, query: str, params: tuple = ()) -> pyodbc.Cursor:
        """Execute SQL query and return cursor."""
        cursor = self.get_connection().cursor()
        cursor.execute(query, params)
        return cursor

    def get_connection(self) -> pyodbc.Connection:
        """Get database connection, create new if needed."""
        if not self._connection or not self._connection.connected:
            self._connection = self._create_connection()
        return self._connection

    def close(self) -> None:
        """Close database connection."""
        if self._connection:
            self._connection.close()
            self._connection = None

    # Private methods (internal use)
    def _create_connection(self) -> pyodbc.Connection:
        """Create new database connection."""
        connection_string = (
            "DRIVER={SQL Server};"
            f"SERVER={os.getenv('DB_SERVER')};"
            f"DATABASE={os.getenv('DB_NAME')};"
            f"UID={os.getenv('DB_USER')};"
            f"PWD={os.getenv('DB_PASSWORD')}"
        )
        return pyodbc.connect(connection_string)
16 config.ini.template (new file)
@@ -0,0 +1,16 @@
[mssql]
host =
name =
password =
user =

[postgres]
host =
name =
password =
port =
user =

[seq]
api_key =
url =
94 notes.md (new file)
@@ -0,0 +1,94 @@
# IndustrialTracker

## Database model

### Device
Main device object, used as the base for dependency injection
- id - number
- name - string
- type - (Relation)->DeviceType
- enabled - boolean

### DeviceType
Device type definition
- id - number
- name - string

### Reading
Reading object used to store data from a Device
- id - number
- device - (Relation)->Device
- reading_time - date
- energy - number
- air - number
- running - boolean
- ... place for more data eventually

### PLC
PLC object used to store the configuration of a PLC
- id - number
- ip - string
- port - number
- db_number - number
- energy_offset - number
- air_offset - number
- running_offset - number
- ... place for more offset definitions

### PAC
PAC object used to store the configuration of a PAC
- id - number
- ip - string
- port - number
... something I missed

### Scheduler
- id - number
- name - string
- interval_seconds - number
- next_run - date
... something I missed

## Workflow

### Main process
`main.py`

An infinite loop that:
- checks the scheduler settings from the database (Scheduler)
- checks whether it is time to run the scheduler
- if yes, runs the scheduler, then updates the next_run date following the schema date.now() + interval_seconds

### Scheduler process/service
`scheduler_service.py`

When the scheduler is run, it:
- gets all devices from the database (Device)
- for each enabled device, first checks whether it is accessible (by pinging)
- if yes, gets data from the device (via read_service) and saves it to the database (Reading)

### Read process/service
`read_service.py`
> The read process should be unified for all devices and handled by dependency injection.
The abstract class for dependency injection is declared in `readers/device_reader.py`.

Each device type has its own reader class that implements a pinging function and a reading function, as sketched below.

* The PLC reader is implemented in `readers/plc_reader.py`
* The PAC reader is implemented in `readers/pac_reader.py`

#### PLC reader

Utilizes the `python-snap7` library to communicate with the PLC.

#### PAC reader
> TODO: The PAC reader is not implemented yet.

Utilizes the `---` library to communicate with the PAC.

### Database process/service
`database_service.py`

Singleton class that implements all database operations.

> Most use cases will be in the scheduler process, where data that has been read is saved to the database.
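To make the dependency-injection idea from notes.md concrete, here is a minimal sketch of what `read_service.py` could look like; the `read_device` helper and the type-name keys "PLC"/"PAC" are assumptions for illustration, not code from this commit:

from typing import Dict, Optional

from src.models import Device, Reading
from src.readers import DeviceReader, PACReader, PLCReader

# Hypothetical mapping from DeviceType.name to the reader that handles it.
READERS: Dict[str, DeviceReader] = {
    "PLC": PLCReader(),
    "PAC": PACReader(),
}


def read_device(device: Device) -> Optional[Reading]:
    """Pick the reader for the device's type, ping it, then collect a reading."""
    reader = READERS.get(device.type.name)
    if reader is None or not reader.is_accessible(device):
        return None
    return reader.collect_reading(device)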
29 pyS7_test.py
@@ -1,29 +0,0 @@
import time
from pyS7 import S7Client
import pprint

LOOPS = 3
counter = 0
client = S7Client(address="172.16.3.231", rack=0, slot=2)
while counter < LOOPS:
    try:
        # Create a new 'S7Client' object to connect to S7-300/400/1200/1500 PLC.
        # Provide the PLC's IP address and slot/rack information

        # client = S7Client(address="172.16.4.220", rack=0, slot=2)
        # Establish connection with the PLC
        client.connect()

        # Define area tags to read
        tags = ["DB9,DBD0"]

        # Read the data from the PLC using the specified tag list
        data = client.read(tags=tags)

        print(data)  # [True, False, 123, True, 10, -2.54943805634653e-12, 'Hello']
    except Exception as e:
        pprint.pprint(e)
    finally:
        client.disconnect()
        counter += 1
        time.sleep(1)
3 requirements-dev.txt (new file)
@@ -0,0 +1,3 @@
-r requirements.txt
pytest
ruff
@@ -1,13 +1,7 @@
-certifi==2024.8.30
-charset-normalizer==3.4.0
-idna==3.10
-pyodbc==5.2.0
-pyS7 @ git+https://github.com/FiloCara/pyS7@761c785799106a04ccbc9e19d6201f728165231d
-python-dateutil==2.9.0.post0
-python-dotenv==1.0.1
-python-snap7==2.0.2
-PyYAML==6.0.2
-requests==2.32.3
-seqlog==0.3.31
-six==1.16.0
-urllib3==2.2.3
+sqlalchemy
+python-snap7
+pyodbc
+python-dotenv
+seqlog
+pymodbus
+pyS7 @ git+https://github.com/FiloCara/pyS7@761c785799106a04ccbc9e19d6201f728165231d
11 src/models/__init__.py (new file)
@@ -0,0 +1,11 @@
from sqlalchemy.ext.declarative import declarative_base

# Base must be bound before the model modules are imported,
# because each of them does `from src.models import Base`.
Base = declarative_base()

from .device import Device, DeviceType
from .pac import PAC
from .plc import PLC
from .reading import Reading
from .scheduler import Scheduler

__all__ = ["Base", "Device", "DeviceType", "PLC", "PAC", "Reading", "Scheduler"]
28 src/models/device.py (new file)
@@ -0,0 +1,28 @@
from sqlalchemy import Boolean, Column, ForeignKey, Integer, String
from sqlalchemy.orm import relationship

from src.models import Base


class DeviceType(Base):
    __tablename__ = "device_type"

    id = Column(Integer, primary_key=True)
    name = Column(String(100), nullable=False)

    devices = relationship("Device", back_populates="type")


class Device(Base):
    __tablename__ = "device"

    id = Column(Integer, primary_key=True)
    name = Column(String(100), nullable=False)
    type_id = Column(Integer, ForeignKey("device_type.id"), nullable=False)
    enabled = Column(Boolean, default=True, nullable=False)

    # Relationships
    type = relationship("DeviceType", back_populates="devices")
    plc_config = relationship("PLC", back_populates="device", uselist=False)
    pac_config = relationship("PAC", back_populates="device", uselist=False)
    readings = relationship("Reading", back_populates="device")
15 src/models/pac.py (new file)
@@ -0,0 +1,15 @@
from sqlalchemy import Column, ForeignKey, Integer, String
from sqlalchemy.orm import relationship

from src.models import Base


class PAC(Base):
    __tablename__ = "pac"

    id = Column(Integer, primary_key=True)
    device_id = Column(Integer, ForeignKey("device.id"), nullable=False)
    ip = Column(String(15), nullable=False)
    port = Column(Integer, default=102)

    device = relationship("Device", back_populates="pac_config")
19 src/models/plc.py (new file)
@@ -0,0 +1,19 @@
from sqlalchemy import Column, ForeignKey, Integer, String
from sqlalchemy.orm import relationship

from src.models import Base


class PLC(Base):
    __tablename__ = "plc"

    id = Column(Integer, primary_key=True)
    device_id = Column(Integer, ForeignKey("device.id"), nullable=False)
    ip = Column(String(15), nullable=False)
    port = Column(Integer, default=102)
    db_number = Column(Integer, nullable=False)
    energy_offset = Column(Integer)
    air_offset = Column(Integer)
    running_offset = Column(Integer)

    device = relationship("Device", back_populates="plc_config")
19 src/models/reading.py (new file)
@@ -0,0 +1,19 @@
from datetime import datetime, timezone

from sqlalchemy import Boolean, Column, DateTime, Float, ForeignKey, Integer
from sqlalchemy.orm import relationship

from src.models import Base


class Reading(Base):
    __tablename__ = "reading"

    id = Column(Integer, primary_key=True)
    device_id = Column(Integer, ForeignKey("device.id"), nullable=False)
    # Use a callable so the timestamp is evaluated at insert time,
    # not once at import time.
    reading_time = Column(DateTime, default=lambda: datetime.now(timezone.utc))
    energy = Column(Float)
    air = Column(Float)
    running = Column(Boolean)

    device = relationship("Device", back_populates="readings")
12 src/models/scheduler.py (new file)
@@ -0,0 +1,12 @@
from sqlalchemy import Column, DateTime, Integer, String

from src.models import Base


class Scheduler(Base):
    __tablename__ = "scheduler"

    id = Column(Integer, primary_key=True)
    name = Column(String(100), nullable=False)
    interval_seconds = Column(Integer, nullable=False)
    next_run = Column(DateTime, nullable=False)
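With all the models above declared on Base, the schema can be created against any SQLAlchemy engine in one call; a generic SQLAlchemy usage sketch (the SQLite URL is only an example, not part of this commit):

from sqlalchemy import create_engine

from src.models import Base

# Creates the device, device_type, plc, pac, reading and scheduler tables.
engine = create_engine("sqlite:///industrial_tracker.db")  # example URL
Base.metadata.create_all(engine)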
5 src/readers/__init__.py (new file)
@@ -0,0 +1,5 @@
from .device_reader import DeviceReader
from .pac_reader import PACReader
from .plc_reader import PLCReader

__all__ = ["DeviceReader", "PLCReader", "PACReader"]
20 src/readers/device_reader.py (new file)
@@ -0,0 +1,20 @@
from abc import ABC, abstractmethod

from src.models import Device, Reading


# Abstract base class for device readers.
# This class defines a common interface that all device readers must implement.
# Using abstract classes ensures consistency across different device types.
class DeviceReader(ABC):
    @abstractmethod
    def is_accessible(self, device: Device) -> bool:
        # This abstract method forces all child classes to implement their own
        # device accessibility check logic
        pass

    @abstractmethod
    def collect_reading(self, device: Device) -> Reading:
        # This abstract method forces all child classes to implement their own
        # data collection logic specific to the device type
        pass
16 src/readers/pac_reader.py (new file)
@@ -0,0 +1,16 @@
from .device_reader import DeviceReader
from src.models import Device, Reading


# Concrete implementation for PAC (Programmable Automation Controller) devices.
# Inherits from DeviceReader and must implement all abstract methods.
class PACReader(DeviceReader):
    def is_accessible(self, device: Device) -> bool:
        # Implement PAC ping check.
        # This method will contain specific logic for checking PAC connectivity.
        pass

    def collect_reading(self, device: Device) -> Reading:
        # Implement PAC data collection.
        # This method will contain specific logic for reading data from PAC devices.
        pass
16 src/readers/plc_reader.py (new file)
@@ -0,0 +1,16 @@
from .device_reader import DeviceReader
from src.models import Device, Reading


# Concrete implementation for PLC (Programmable Logic Controller) devices.
# Inherits from DeviceReader and must implement all abstract methods.
class PLCReader(DeviceReader):
    def is_accessible(self, device: Device) -> bool:
        # Implement PLC ping check.
        # This method will contain specific logic for checking PLC connectivity.
        pass

    def collect_reading(self, device: Device) -> Reading:
        # Implement PLC data collection.
        # This method will contain specific logic for reading data from PLC devices.
        pass
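Since notes.md says the PLC reader utilizes python-snap7, here is a minimal sketch of what the stubs above could grow into, pieced together from the snap7 calls in the archived scripts (socket probe on port 102, rack=0/slot=2, db_read plus the snap7.util getters). The field sizes and the use of get_lreal for both energy and air are assumptions, not code from this commit:

import socket
from datetime import datetime, timezone

import snap7
from snap7.util.getters import get_bool, get_lreal

from src.models import Device, Reading
from src.readers import DeviceReader


class Snap7PLCReader(DeviceReader):
    def is_accessible(self, device: Device) -> bool:
        # Same socket probe the archived classes.py used: ISO-TSAP on port 102.
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
            sock.settimeout(1)
            return sock.connect_ex((device.plc_config.ip, 102)) == 0

    def collect_reading(self, device: Device) -> Reading:
        cfg = device.plc_config
        client = snap7.client.Client()
        client.connect(cfg.ip, 0, 2)  # rack=0, slot=2, as in the archived test scripts
        try:
            # Read enough bytes to cover all configured offsets (sizes assumed:
            # 8-byte LREALs for energy/air, 1 byte for the running flag).
            size = max(cfg.energy_offset + 8, cfg.air_offset + 8, cfg.running_offset + 1)
            data = client.db_read(cfg.db_number, 0, size)
            return Reading(
                device_id=device.id,
                reading_time=datetime.now(timezone.utc),
                energy=get_lreal(data, cfg.energy_offset),
                air=get_lreal(data, cfg.air_offset),
                running=get_bool(data, cfg.running_offset, 0),
            )
        finally:
            client.disconnect()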
46 src/scripts/test_db.py (new file)
@@ -0,0 +1,46 @@
from datetime import datetime, timedelta
from src.services.database_service import DatabaseService

def test_database_operations():
    # Initialize database service
    db = DatabaseService()

    # Test getting device types
    print("\n=== Testing Device Types ===")
    device_types = db.get_device_types()
    print(f"Found {len(device_types)} device types")
    for dt in device_types:
        print(f"- {dt.name}")

    # Test getting enabled devices
    print("\n=== Testing Enabled Devices ===")
    enabled_devices = db.get_enabled_devices()
    print(f"Found {len(enabled_devices)} enabled devices")
    for device in enabled_devices:
        print(f"- {device.name} (Type: {device.type.name})")

    # Test scheduler operations
    print("\n=== Testing Scheduler ===")
    scheduler = db.get_scheduler("main")
    if scheduler:
        print(f"Current scheduler: {scheduler.name}")
        print(f"Current next_run: {scheduler.next_run}")

        # Test updating scheduler
        new_next_run = datetime.now() + timedelta(minutes=5)
        db.update_scheduler_next_run(scheduler, new_next_run)
        print(f"Updated next_run to: {new_next_run}")

    # Test getting readings for a device
    print("\n=== Testing Readings ===")
    if enabled_devices:
        test_device = enabled_devices[0]
        from_date = datetime.now() - timedelta(days=1)
        to_date = datetime.now()
        readings = db.get_readings_by_device(test_device.id, from_date, to_date)
        print(f"Found {len(readings)} readings for device {test_device.name}")
        for reading in readings[:5]:  # Show first 5 readings
            print(f"- Time: {reading.reading_time}, Energy: {reading.energy}")

if __name__ == "__main__":
    test_database_operations()
0 src/services/__init__.py (new file)
79 src/services/database_service.py (new file)
@@ -0,0 +1,79 @@
from datetime import datetime
from typing import List, Optional

from sqlalchemy import create_engine
from sqlalchemy.orm import Session

from src.models import PAC, PLC, Device, DeviceType, Reading, Scheduler
from src.utils.config import Config


class SingletonMeta(type):
    _instances = {}

    def __call__(cls, *args, **kwargs):
        """
        This is a singleton metaclass implementation that ensures only one instance of a class is created.
        When a class with this metaclass is instantiated:
        1. It checks if the class already has an instance in the _instances dictionary
        2. If no instance exists, it creates one using super().__call__ and stores it
        3. If an instance exists, it returns the stored instance
        This way, multiple calls to create an instance will always return the same object
        """
        if cls not in cls._instances:
            cls._instances[cls] = super().__call__(*args, **kwargs)
        return cls._instances[cls]


class DatabaseService(metaclass=SingletonMeta):
    def __init__(self):
        config = Config()
        # Config exposes get_mssql_url()/get_postgres_url(); pick the backend in use.
        engine = create_engine(config.get_postgres_url())
        self.session = Session(engine)

    def get_enabled_devices(self) -> List[Device]:
        return self.session.query(Device).filter(Device.enabled.is_(True)).all()

    def get_scheduler(self, name: str) -> Optional[Scheduler]:
        return self.session.query(Scheduler).filter(Scheduler.name == name).first()

    def update_scheduler_next_run(
        self, scheduler: Scheduler, next_run: datetime
    ) -> None:
        scheduler.next_run = next_run
        self.session.commit()

    def save_reading(self, reading: Reading) -> None:
        self.session.add(reading)
        self.session.commit()

    def save_readings(self, readings: List[Reading]) -> None:
        self.session.add_all(readings)
        self.session.commit()

    def get_device_types(self) -> List[DeviceType]:
        return self.session.query(DeviceType).all()

    def get_plc_config(self, device_id: int) -> Optional[PLC]:
        # Look up the PLC row by its device foreign key, not the PLC primary key.
        return self.session.query(PLC).filter(PLC.device_id == device_id).first()

    def get_pac_config(self, device_id: int) -> Optional[PAC]:
        return self.session.query(PAC).filter(PAC.device_id == device_id).first()

    def get_device(self, device_id: int) -> Optional[Device]:
        return self.session.query(Device).filter(Device.id == device_id).first()

    def get_device_by_name(self, name: str) -> Optional[Device]:
        return self.session.query(Device).filter(Device.name == name).first()

    def get_readings_by_device(
        self, device_id: int, from_date: datetime, to_date: datetime
    ) -> List[Reading]:
        return (
            self.session.query(Reading)
            .filter(
                Reading.device_id == device_id,
                Reading.reading_time >= from_date,
                Reading.reading_time <= to_date,
            )
            .all()
        )
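A quick illustration of what the SingletonMeta metaclass buys (illustrative snippet, not part of the commit): constructing DatabaseService twice yields the same object, so the engine and session are created exactly once.

db_a = DatabaseService()  # first call builds the engine and session
db_b = DatabaseService()  # second call returns the cached instance
assert db_a is db_b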
28 src/services/scheduler_service.py (new file)
@@ -0,0 +1,28 @@
from datetime import datetime, timedelta, timezone

from sqlalchemy.orm import Session

from src.models import Scheduler


class SchedulerService:
    def __init__(self, db_session: Session):
        # Initialize the scheduler service with a database session
        self.session = db_session

    def run_main_loop(self):
        # Main loop that continuously checks and executes scheduled tasks
        while True:
            # Get all scheduler entries from the database
            schedulers = self.session.query(Scheduler).all()
            for scheduler in schedulers:
                # Check if it's time to execute the scheduler
                if datetime.now(timezone.utc) >= scheduler.next_run:
                    # Execute the scheduled task
                    self.execute_scheduler(scheduler)
                    # Calculate and set the next run time based on the interval
                    scheduler.next_run = datetime.now(timezone.utc) + timedelta(
                        seconds=scheduler.interval_seconds
                    )
                    # Save changes to the database
                    self.session.commit()
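notes.md describes `main.py` as the infinite loop that drives this service; that module is not part of this commit, but a minimal sketch of the wiring could look like:

from src.services.database_service import DatabaseService
from src.services.scheduler_service import SchedulerService

if __name__ == "__main__":
    # Reuse the singleton's session so both services talk to the same engine.
    db = DatabaseService()
    SchedulerService(db.session).run_main_loop()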
34 src/utils/config.py (new file)
@@ -0,0 +1,34 @@
from configparser import ConfigParser
from pathlib import Path
import platform


class Config:
    _instance = None

    def __new__(cls):
        if cls._instance is None:
            cls._instance = super().__new__(cls)
            config = ConfigParser()
            config_path = Path("config.ini")
            config.read(config_path)
            cls._instance.config = config
        return cls._instance

    def get_mssql_url(self) -> str:
        mssql = self.config["mssql"]
        driver = "ODBC Driver 18 for SQL Server" if platform.system() == "Linux" else "SQL Server"
        driver = driver.replace(" ", "+")
        return f"mssql+pyodbc://{mssql['user']}:{mssql['password']}@{mssql['host']}/{mssql['name']}?driver={driver}"

    def get_postgres_url(self) -> str:
        db = self.config["postgres"]
        return f"postgresql://{db['user']}:{db['password']}@{db['host']}/{db['name']}"

    def get_seq_url(self) -> str:
        seq = self.config["seq"]
        return seq["url"]

    def get_seq_api_key(self) -> str:
        seq = self.config["seq"]
        return seq["api_key"]
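Typical usage is then just the following (illustrative; assumes a config.ini filled in from the template above sits in the working directory):

config = Config()  # parses config.ini once; later constructions reuse the instance
print(config.get_postgres_url())  # postgresql://user:password@host/name
print(config.get_seq_url())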
0 src/utils/helpers.py (new file)
0 src/utils/logger.py (new file)
2 test-server.sh (new file)
@@ -0,0 +1,2 @@
#!/bin/sh
.venv/bin/python -m snap7.server