mirror of
https://github.com/michivonah/bbzw-horizon.git
synced 2025-12-22 17:16:27 +01:00
begin new version of webservice
This commit is contained in:
parent
a38e1678a6
commit
831ce1dbc8
4 changed files with 237 additions and 87 deletions
160
.gitignore
vendored
Normal file
160
.gitignore
vendored
Normal file
|
|
@ -0,0 +1,160 @@
|
||||||
|
# Byte-compiled / optimized / DLL files
|
||||||
|
__pycache__/
|
||||||
|
*.py[cod]
|
||||||
|
*$py.class
|
||||||
|
|
||||||
|
# C extensions
|
||||||
|
*.so
|
||||||
|
|
||||||
|
# Distribution / packaging
|
||||||
|
.Python
|
||||||
|
build/
|
||||||
|
develop-eggs/
|
||||||
|
dist/
|
||||||
|
downloads/
|
||||||
|
eggs/
|
||||||
|
.eggs/
|
||||||
|
lib/
|
||||||
|
lib64/
|
||||||
|
parts/
|
||||||
|
sdist/
|
||||||
|
var/
|
||||||
|
wheels/
|
||||||
|
share/python-wheels/
|
||||||
|
*.egg-info/
|
||||||
|
.installed.cfg
|
||||||
|
*.egg
|
||||||
|
MANIFEST
|
||||||
|
|
||||||
|
# PyInstaller
|
||||||
|
# Usually these files are written by a python script from a template
|
||||||
|
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||||
|
*.manifest
|
||||||
|
*.spec
|
||||||
|
|
||||||
|
# Installer logs
|
||||||
|
pip-log.txt
|
||||||
|
pip-delete-this-directory.txt
|
||||||
|
|
||||||
|
# Unit test / coverage reports
|
||||||
|
htmlcov/
|
||||||
|
.tox/
|
||||||
|
.nox/
|
||||||
|
.coverage
|
||||||
|
.coverage.*
|
||||||
|
.cache
|
||||||
|
nosetests.xml
|
||||||
|
coverage.xml
|
||||||
|
*.cover
|
||||||
|
*.py,cover
|
||||||
|
.hypothesis/
|
||||||
|
.pytest_cache/
|
||||||
|
cover/
|
||||||
|
|
||||||
|
# Translations
|
||||||
|
*.mo
|
||||||
|
*.pot
|
||||||
|
|
||||||
|
# Django stuff:
|
||||||
|
*.log
|
||||||
|
local_settings.py
|
||||||
|
db.sqlite3
|
||||||
|
db.sqlite3-journal
|
||||||
|
|
||||||
|
# Flask stuff:
|
||||||
|
instance/
|
||||||
|
.webassets-cache
|
||||||
|
|
||||||
|
# Scrapy stuff:
|
||||||
|
.scrapy
|
||||||
|
|
||||||
|
# Sphinx documentation
|
||||||
|
docs/_build/
|
||||||
|
|
||||||
|
# PyBuilder
|
||||||
|
.pybuilder/
|
||||||
|
target/
|
||||||
|
|
||||||
|
# Jupyter Notebook
|
||||||
|
.ipynb_checkpoints
|
||||||
|
|
||||||
|
# IPython
|
||||||
|
profile_default/
|
||||||
|
ipython_config.py
|
||||||
|
|
||||||
|
# pyenv
|
||||||
|
# For a library or package, you might want to ignore these files since the code is
|
||||||
|
# intended to run in multiple environments; otherwise, check them in:
|
||||||
|
# .python-version
|
||||||
|
|
||||||
|
# pipenv
|
||||||
|
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
||||||
|
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
||||||
|
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
||||||
|
# install all needed dependencies.
|
||||||
|
#Pipfile.lock
|
||||||
|
|
||||||
|
# poetry
|
||||||
|
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
|
||||||
|
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
||||||
|
# commonly ignored for libraries.
|
||||||
|
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
|
||||||
|
#poetry.lock
|
||||||
|
|
||||||
|
# pdm
|
||||||
|
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
|
||||||
|
#pdm.lock
|
||||||
|
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
|
||||||
|
# in version control.
|
||||||
|
# https://pdm.fming.dev/#use-with-ide
|
||||||
|
.pdm.toml
|
||||||
|
|
||||||
|
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
|
||||||
|
__pypackages__/
|
||||||
|
|
||||||
|
# Celery stuff
|
||||||
|
celerybeat-schedule
|
||||||
|
celerybeat.pid
|
||||||
|
|
||||||
|
# SageMath parsed files
|
||||||
|
*.sage.py
|
||||||
|
|
||||||
|
# Environments
|
||||||
|
.env
|
||||||
|
.venv
|
||||||
|
env/
|
||||||
|
venv/
|
||||||
|
ENV/
|
||||||
|
env.bak/
|
||||||
|
venv.bak/
|
||||||
|
|
||||||
|
# Spyder project settings
|
||||||
|
.spyderproject
|
||||||
|
.spyproject
|
||||||
|
|
||||||
|
# Rope project settings
|
||||||
|
.ropeproject
|
||||||
|
|
||||||
|
# mkdocs documentation
|
||||||
|
/site
|
||||||
|
|
||||||
|
# mypy
|
||||||
|
.mypy_cache/
|
||||||
|
.dmypy.json
|
||||||
|
dmypy.json
|
||||||
|
|
||||||
|
# Pyre type checker
|
||||||
|
.pyre/
|
||||||
|
|
||||||
|
# pytype static type analyzer
|
||||||
|
.pytype/
|
||||||
|
|
||||||
|
# Cython debug symbols
|
||||||
|
cython_debug/
|
||||||
|
|
||||||
|
# PyCharm
|
||||||
|
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
|
||||||
|
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
|
||||||
|
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
||||||
|
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
||||||
|
#.idea/
|
||||||
35
webservice/dbfunctions.py
Normal file
35
webservice/dbfunctions.py
Normal file
|
|
@ -0,0 +1,35 @@
|
||||||
|
import psycopg2
|
||||||
|
from psycopg2.extras import execute_values
|
||||||
|
import os
|
||||||
|
from dotenv import load_dotenv
|
||||||
|
|
||||||
|
load_dotenv()
|
||||||
|
|
||||||
|
# Create a database connection
|
||||||
|
# Open a database connection and hand back a ready-to-use cursor.
# Connection parameters are taken from the environment (loaded via dotenv):
# DBHOST, DBNAME, DBUSER, DBPASSWORD, DBPORT.
def connectDatabase():
    connection = psycopg2.connect(
        host=os.getenv('DBHOST'),
        database=os.getenv('DBNAME'),
        user=os.getenv('DBUSER'),
        password=os.getenv('DBPASSWORD'),
        port=os.getenv('DBPORT'),
    )
    # Every statement commits immediately; no explicit transaction handling.
    connection.autocommit = True
    # NOTE: the cursor (not the connection) is returned; callers can reach the
    # underlying connection via cursor.connection if they need to close it.
    return connection.cursor()
|
||||||
|
|
||||||
|
# Execute database query
|
||||||
|
def executeQuery(query):
|
||||||
|
conn = connectDatabase()
|
||||||
|
conn.execute(query)
|
||||||
|
result = conn.fetchall()
|
||||||
|
return result
|
||||||
|
|
||||||
|
# Execute database query without fetching data
|
||||||
|
def executeWithoutFetch(query):
|
||||||
|
conn = connectDatabase()
|
||||||
|
conn.execute(query)
|
||||||
|
return None
|
||||||
|
|
||||||
|
# Insert many rows at once using psycopg2's execute_values helper.
#
# query: an INSERT statement containing a single %s placeholder for the VALUES
#        list, e.g. 'INSERT INTO t (a, b) VALUES %s'.
# data:  an iterable of row tuples.
# Returns None (execute_values only returns rows when called with fetch=True).
#
# Fix: the connection opened by connectDatabase() was never closed, leaking one
# server connection per call; it is now closed via cursor.connection.
def bulkInsert(query, data):
    cursor = connectDatabase()
    try:
        return execute_values(cursor, query, data)
    finally:
        cursor.connection.close()
|
||||||
|
|
@ -1,5 +1,7 @@
|
||||||
fastapi
|
fastapi[standard]==0.115.12
|
||||||
uvicorn
|
uvicorn
|
||||||
sqlalchemy
|
sqlalchemy
|
||||||
psycopg2-binary==2.9.9
||||||
|
python-dotenv==1.0.0
|
||||||
|
requests
|
||||||
|
|
@ -1,94 +1,47 @@
|
||||||
from fastapi import FastAPI, Depends, HTTPException
|
# Webservice
|
||||||
from sqlalchemy import Column, Integer, Float, String, DateTime, create_engine, ForeignKey
|
# INP21b - Timo Weber & Michael von Ah
|
||||||
from sqlalchemy.orm import sessionmaker, declarative_base, Session, relationship
|
|
||||||
|
from fastapi import FastAPI, HTTPException
|
||||||
|
from pydantic import BaseModel
|
||||||
|
import psycopg2
|
||||||
|
import dbfunctions as db
|
||||||
|
import requests, secrets, hashlib, re, string, random, os
|
||||||
|
from datetime import datetime, timedelta, date, time
|
||||||
|
import json
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
import os
|
|
||||||
|
|
||||||
DATABASE_URL = os.getenv("DB_CONNECTION_STRING", "postgresql://user:password@localhost/sensordb")
|
DATABASE_URL = os.getenv("DB_CONNECTION_STRING", "postgresql://user:password@localhost/sensordb")
|
||||||
|
|
||||||
Base = declarative_base()
|
""" class apiFunctions:
|
||||||
engine = create_engine(DATABASE_URL)
|
def __init__(self) -> None:
|
||||||
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
|
self.alternativeImage = "https://cdn.ep.neodym.dev/media/20240505-halloween.jpeg"
|
||||||
|
|
||||||
app = FastAPI()
|
def getAttractionList(self):
|
||||||
|
query = 'SELECT "name", "id", "imageurl", "description" FROM "attraction" ORDER BY "name";'
|
||||||
|
attractions = db.executeQuery(query)
|
||||||
|
if attractions:
|
||||||
|
return attractions
|
||||||
|
else:
|
||||||
|
raise Exception("Cannot generate list of attractions. Request invalid") """
|
||||||
|
|
||||||
|
|
||||||
#### API ####
# Application entry point for the BBZW-Horizon webservice.
app = FastAPI(
    # Fix: "Horizion" was a typo in the displayed API title.
    title="M241-M245-BBZW-Horizon",
    description="BBZW-Horizon",
    summary="BBZW-Horizon",
    version="0.0.1"
)
|
||||||
|
|
||||||
# Optional/Field are needed for the annotations and default_factory below.
from typing import Optional
from pydantic import Field

class Session(BaseModel):
    """API response model describing a login session."""
    # Fix: fields annotated `str` but defaulting to None must be Optional.
    username: Optional[str] = None
    token: Optional[str] = None
    message: Optional[str] = None
    # Fix: a plain `datetime.now()` default is evaluated once at import time,
    # freezing the timestamp for every instance; default_factory evaluates it
    # per instance instead.
    timestamp: datetime = Field(default_factory=datetime.now)
|
||||||
|
|
||||||
class SensorData(Base):
|
@app.post("/account/new-session", tags=["account"])
async def initNewSessionApi(username: str, password: str) -> Session:
    """Create a new login session for the given credentials.

    NOTE(review): currently a stub — `password` is never checked and a
    hard-coded username/token pair is returned; real credential verification
    and token generation still need to be implemented.
    SECURITY(review): username/password arrive as query parameters here, which
    can end up in access logs; consider moving them into a request body.
    """
    try:
        # TODO: verify credentials against the database and issue a real token.
        return Session(username="username", token="sessionToken", message="Session initiated successfully")
    except Exception as error:
        # Any failure during session creation is reported as 401 Unauthorized.
        raise HTTPException(status_code=401, detail=f"{error}")
|
||||||
|
|
||||||
def verify_token(token: str, db: Session):
|
|
||||||
session = db.query(SessionToken).filter(SessionToken.token == token).first()
|
|
||||||
if not session or session.validuntil < datetime.utcnow():
|
|
||||||
raise HTTPException(status_code=401, detail="Invalid or expired token")
|
|
||||||
return session
|
|
||||||
|
|
||||||
@app.post("/sensor-data/")
|
|
||||||
def receive_sensor_data(
|
|
||||||
token: str,
|
|
||||||
clientname: str, # Ändert clientid zu clientname
|
|
||||||
temperature: float,
|
|
||||||
humidity: float,
|
|
||||||
pressure: float,
|
|
||||||
voc: float,
|
|
||||||
gas: float,
|
|
||||||
db: Session = Depends(get_db)
|
|
||||||
):
|
|
||||||
verify_token(token, db)
|
|
||||||
|
|
||||||
# Suche die Client-ID anhand des Client-Namens
|
|
||||||
client = db.query(Client).filter(Client.name == clientname).first()
|
|
||||||
if not client:
|
|
||||||
raise HTTPException(status_code=404, detail="Client not found")
|
|
||||||
|
|
||||||
sensor_data = SensorData(
|
|
||||||
clientid=client.id, # Verwende die gefundene ID
|
|
||||||
temperature=temperature,
|
|
||||||
humidity=humidity,
|
|
||||||
pressure=pressure,
|
|
||||||
voc=voc,
|
|
||||||
gas=gas
|
|
||||||
)
|
|
||||||
db.add(sensor_data)
|
|
||||||
db.commit()
|
|
||||||
db.refresh(sensor_data)
|
|
||||||
return {"message": "Data received", "id": sensor_data.id}
|
|
||||||
|
|
||||||
@app.get("/sensor-data/")
|
|
||||||
def get_sensor_data(db: Session = Depends(get_db)):
|
|
||||||
data = db.query(SensorData).all()
|
|
||||||
return data
|
|
||||||
|
|
||||||
# Erstelle die Tabellen, falls sie noch nicht existieren
|
|
||||||
Base.metadata.create_all(bind=engine)
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue