diff --git a/backend/.env.template b/backend/.env.template
index 3c404bab560f96c67d262d1a72ff58a08b3851e9..241264b463043767943b3ef5f981fe891453cee2 100644
--- a/backend/.env.template
+++ b/backend/.env.template
@@ -6,4 +6,23 @@
 MYSQL_ROOT_PASSWORD=rootpassword
 DB_HOST=localhost
 DB_PORT=3306
-WEB_ROOT=http://localhost:3000
\ No newline at end of file
+WEB_ROOT=http://localhost:3000
+
+CAM_NUMBER=1
+
+CAM_0_PLACE=local
+CAM_0_IP=
+CAM_0_USER=
+CAM_0_PASSWORD=
+CAM_0_STREAM=stream1
+CAM_0_A_FACTOR=30
+CAM_0_B_FACTOR=120
+CAM_0_FRAMEGAP=150
+CAM_0_POINTS_NB=7
+CAM_0_POINT_0=70, 370
+CAM_0_POINT_1=420, 720
+CAM_0_POINT_2=1280, 720
+CAM_0_POINT_3=1280, 250
+CAM_0_POINT_4=930, 215
+CAM_0_POINT_5=450, 550
+CAM_0_POINT_6=130, 350
\ No newline at end of file
diff --git a/backend/db/crud.py b/backend/db/crud.py
index 77f677b9b3d1d48e545e484b5753f84df1ef1cf2..37af3b1ad4cb7c30276dbb9dd8d27f971de2af56 100644
--- a/backend/db/crud.py
+++ b/backend/db/crud.py
@@ -18,7 +18,8 @@ def get_waiting_time(place: str, db: Session):
     data = {"status": False, "waiting_time": None, "next_timetable": None}
     first_timeslot = get_timeslot(place, weekday, True, db)
     if first_timeslot and current_time < first_timeslot[0]:
-        data["next_timetable"] = "{:d}h{:02d}".format(first_timeslot[0].hour, first_timeslot[0].minute)
+        data["next_timetable"] = "{:d}h{:02d}".format(
+            first_timeslot[0].hour, first_timeslot[0].minute)
         return data
     elif first_timeslot and current_time <= first_timeslot[1]:
         last_record = db.query(
@@ -36,7 +37,8 @@ def get_waiting_time(place: str, db: Session):
         return data
     second_timeslot = get_timeslot(place, weekday, False, db)
     if second_timeslot and current_time < second_timeslot[0]:
-        data["next_timetable"] = "{:d}h{:02d}".format(second_timeslot[0].hour, second_timeslot[0].minute)
+        data["next_timetable"] = "{:d}h{:02d}".format(
+            second_timeslot[0].hour, second_timeslot[0].minute)
         return data
     elif second_timeslot and current_time <= second_timeslot[1]:
         last_record = db.query(
@@ -55,7 +57,8 @@ def get_waiting_time(place: str, db: Session):
     return data
 
 
-def get_avg_graph_points(place: str, weekday: int, min_time: time, max_time: time, interval: timedelta, db: Session):
+def get_avg_graph_points(place: str, weekday: int, min_time: time,
+                         max_time: time, interval: timedelta, db: Session):
     """ Get the average waiting time for each
     interval between two time steps """
     def shift_time(t: time, delta: timedelta):
@@ -105,14 +108,17 @@ def get_avg_graph(place: str, db: Session):
     weekday, current_time = current_date.weekday(), current_date.time()
     first_timeslot = get_timeslot(place, weekday, True, db)
     if first_timeslot and current_time <= first_timeslot[1]:
-        return get_avg_graph_points(place, weekday, first_timeslot[0], first_timeslot[1], timedelta(minutes=5), db)
+        return get_avg_graph_points(
+            place, weekday, first_timeslot[0], first_timeslot[1], timedelta(minutes=5), db)
     second_timeslot = get_timeslot(place, weekday, False, db)
     if second_timeslot and current_time <= second_timeslot[1]:
-        return get_avg_graph_points(place, weekday, second_timeslot[0], second_timeslot[1], timedelta(minutes=5), db)
+        return get_avg_graph_points(
+            place, weekday, second_timeslot[0], second_timeslot[1], timedelta(minutes=5), db)
     return None
 
 
-def get_current_graph_points(place: str, current_date: date, min_time: time, max_time: time, interval: timedelta, db: Session):
+def get_current_graph_points(place: str, current_date: date,
+                             min_time: time, max_time: time, interval: timedelta, db: Session):
     """ Get the waiting time for each
     interval between two time steps for the current timeslot """
     def shift_time(t: time, delta: timedelta):
@@ -160,17 +166,20 @@
 def get_current_graph(place: str, db: Session):
     """ Get the waiting_time_graph for the current timeslot"""
     current_date = datetime.now(tz=pytz.timezone("Europe/Paris"))
-    weekday, day, current_time = current_date.weekday(), current_date.date(), current_date.time()
+    weekday, day, current_time = current_date.weekday(
+    ), current_date.date(), current_date.time()
     first_timeslot = get_timeslot(place, weekday, True, db)
     if first_timeslot and current_time <= first_timeslot[0]:
         return []
     elif first_timeslot and current_time <= first_timeslot[1]:
-        return get_current_graph_points(place, day, first_timeslot[0], current_time, timedelta(minutes=5), db)
+        return get_current_graph_points(
+            place, day, first_timeslot[0], current_time, timedelta(minutes=5), db)
     second_timeslot = get_timeslot(place, weekday, False, db)
     if second_timeslot and current_time <= second_timeslot[0]:
         return []
     elif second_timeslot and current_time <= second_timeslot[1]:
-        return get_current_graph_points(place, day, second_timeslot[0], current_time, timedelta(minutes=5), db)
+        return get_current_graph_points(
+            place, day, second_timeslot[0], current_time, timedelta(minutes=5), db)
     return []
 
 
@@ -179,7 +188,10 @@ def get_current_graph(place: str, db: Session):
 def get_comments(place: str, page: int, db: Session):
     """ Get the 10 last comments for the given place """
     if page == 0:
-        comments = db.query(models.Comments).order_by(models.Comments.published_at.desc(), models.Comments.id.desc()).all()
+        comments = db.query(
+            models.Comments).order_by(
+            models.Comments.published_at.desc(),
+            models.Comments.id.desc()).all()
     else:
         comments = db.query(
             models.Comments).filter(
@@ -197,7 +209,10 @@ def get_comments(place: str, page: int, db: Session):
 def create_comment(place: str, new_comments: schemas.CommentBase, db: Session):
     """ Add a new comment to the database """
     date = datetime.now(tz=pytz.timezone("Europe/Paris"))
-    db_comment = models.Comments(**new_comments.dict(), published_at=date, place=place)
+    db_comment = models.Comments(
+        **new_comments.dict(),
+        published_at=date,
+        place=place)
     db.add(db_comment)
     db.commit()
     db.refresh(db_comment)
@@ -217,7 +232,10 @@ def delete_comment(id: int, db: Session):
 
 def get_news(place: str, db: Session):
     """ Get the news for the given place """
-    news = db.query(models.News).filter(models.News.place == place).order_by(models.News.published_at.desc()).all()
+    news = db.query(
+        models.News).filter(
+        models.News.place == place).order_by(
+        models.News.published_at.desc()).all()
     return news
 
 
@@ -267,7 +285,8 @@ def get_timeslot(place: str, day: int, timeslot: bool, db: Session):
     return opening_hours
 
 
-def create_opening_hours(new_opening_hours: schemas.OpeningHoursBase, db: Session):
+def create_opening_hours(
+        new_opening_hours: schemas.OpeningHoursBase, db: Session):
     """ Add opening hours to the database """
     db_opening_hours = models.OpeningHours(**new_opening_hours.dict())
     db.add(db_opening_hours)
@@ -281,7 +300,9 @@ def delete_opening_hours(id: int, db: Session):
     if id == 0:
         db.query(models.OpeningHours).delete()
     else:
-        db.query(models.OpeningHours).filter(models.OpeningHours.id == id).delete()
+        db.query(
+            models.OpeningHours).filter(
+            models.OpeningHours.id == id).delete()
     db.commit()
 
 
@@ -290,7 +311,9 @@ def delete_opening_hours(id: int, db: Session):
 def get_restaurants(db: Session):
     current_date = datetime.now(tz=pytz.timezone("Europe/Paris"))
     weekday, current_time = current_date.weekday(), current_date.time()
-    restaurant_names = [r.place for r in db.query(models.OpeningHours.place).distinct()]
+    restaurant_names = [
+        r.place for r in db.query(
+            models.OpeningHours.place).distinct()]
     restaurants = []
 
     for name in restaurant_names:
@@ -328,7 +351,8 @@ def get_restaurants(db: Session):
             ).first()
             if last_record:
                 waiting_time = last_record.waiting_time
-                restaurant["waiting_time"] = round(waiting_time.total_seconds() / 60)
+                restaurant["waiting_time"] = round(
+                    waiting_time.total_seconds() / 60)
             else:
                 restaurant["waiting_time"] = None
         else:
diff --git a/backend/db/schemas.py b/backend/db/schemas.py
index e6107329523baece2bc414e6a1c9bbace35912f2..32d98acd83dc8d98efd348654ff48ad70db616d7 100644
--- a/backend/db/schemas.py
+++ b/backend/db/schemas.py
@@ -8,10 +8,12 @@ from pydantic import BaseModel, Field
 
 class RecordBase(BaseModel):
     """Records base schema"""
-    place: str = Field(..., title="Name of the RU corresponding the given record")
+    place: str = Field(...,
+                       title="Name of the RU corresponding the given record")
     date: datetime = Field(..., title="Date of the record")
     density: float = Field(..., title="Estimated density of people")
-    waiting_time: Optional[timedelta] = Field(title="Estimated waiting time for people coming at this date")
+    waiting_time: Optional[timedelta] = Field(
+        title="Estimated waiting time for people coming at this date")
 
 
 class Record(RecordBase):
@@ -30,7 +32,8 @@ class CommentBase(BaseModel):
 class Comment(CommentBase):
     """Database comments base schema"""
     id: int
-    published_at: datetime = Field(..., title="Publication date of the comment")
+    published_at: datetime = Field(...,
+                                   title="Publication date of the comment")
     place: str = Field(..., title="Name of the RU corresponding the comment")
 
     class Config:
@@ -56,9 +59,11 @@ class News(NewsBase):
 
 class OpeningHoursBase(BaseModel):
     """Database opening_hours base schema"""
-    place: str = Field(..., title="Name of the RU corresponding the given record")
+    place: str = Field(...,
+                       title="Name of the RU corresponding the given record")
     day: int = Field(..., title="Day of the week")
-    timeslot: bool = Field(..., title="Service slot (True for midday, False for evening)")
+    timeslot: bool = Field(...,
+                           title="Service slot (True for midday, False for evening)")
     open_time: time = Field(..., title="Opening time")
     close_time: time = Field(..., title="Closing time")
 
diff --git a/backend/main.py b/backend/main.py
index 7ab27136d6675db99bbef0f8c48e9a2ab2880bdc..126c03b680c079c74a0b26890530818081de8ad3 100644
--- a/backend/main.py
+++ b/backend/main.py
@@ -7,9 +7,11 @@ from fastapi import Depends
 from sqlalchemy.orm import Session
 from db import schemas
 from typing import List
+from threading import Thread
 
 from db import database, models
 from routers import stats, comments, news
+from video_capture import handle_cameras
 
 
 app = FastAPI(docs_url="/api/docs", openapi_url="/api/openapi.json")
@@ -30,9 +32,11 @@ app.add_middleware(
 
 
 @app.on_event("startup")
-def on_startup():
+async def on_startup():
     # Database creation
     models.Base.metadata.create_all(bind=database.engine)
+    t = Thread(target=handle_cameras)
+    t.start()
 
 
 # Integration of routers
@@ -43,7 +47,8 @@ app.include_router(news.router)
 
 @app.get('/api/records', response_model=List[schemas.Record])
 async def get_records(place: str, db: Session = Depends(get_db)):
-    return db.query(models.Records).filter(models.Records.place == place).order_by(models.Records.date.desc()).all()
+    return db.query(models.Records).filter(models.Records.place ==
+                                           place).order_by(models.Records.date.desc()).all()
 
 
 @app.post('/api/records', response_model=schemas.Record)
@@ -63,39 +68,3 @@ async def del_records(id: int, db: Session = Depends(get_db)):
     db.query(models.Records).filter(models.Records.id == id).delete()
     db.commit()
     return
-
-
-"""
-import cv2
-import numpy as np
-import keras
-
-from utils.preprocessing import fix_singular_shape, norm_by_imagenet
-
-
-model = keras.models.load_model('model')
-
-# contours of the zone of a picture that should be analyzed by the model
-contours = {
-    'eiffel': [[70, 370], [420, 720], [1280, 720], [1280, 250], [930, 215], [450, 550], [130, 350]]
-}
-
-masks = {}
-for key, polygon in contours.items():
-    mask = np.zeros((1280, 720, 3), dtype=np.unit8)
-    cv2.fillPoly(mask, [polygon], (255, 255, 255))
-    masks[key] = mask
-
-
-@app.get("/estimate/{id}")
-async def estimate_(id: str) -> float:
-    # img = fetch(...)
-    img = np.zeros((1280, 720, 3))
-    resized_img = cv2.cvtColor(cv2.resize(img, (1280, 720)), cv2.COLOR_BGR2RGB).astype(np.float32)
-    masked_img = cv2.bitwise_and(resized_img, mask[id])
-    treated_img = fix_singular_shape(masked_img, 16)
-    input_image = np.expand_dims(np.squeeze(norm_by_imagenet([treated_img])), axis=0)
-    pred_map = np.squeeze(model.predict(input_image))
-    count_prediction = np.sum(pred_map)
-    return count_prediction
-"""
diff --git a/backend/routers/opening_hours.py b/backend/routers/opening_hours.py
index af8ab0755dcfacde671b9c04d7f007844bad12be..76ec1fbe3b7cdc07d96e5e31b66a2a867ced0b48 100644
--- a/backend/routers/opening_hours.py
+++ b/backend/routers/opening_hours.py
@@ -9,12 +9,14 @@ from db.database import get_db
 router = APIRouter(prefix="/api", tags=["opening_hours"])
 
 
-@router.get('/{place}/opening_hours', response_model=List[schemas.OpeningHours])
+@router.get('/{place}/opening_hours',
+            response_model=List[schemas.OpeningHours])
 async def get_opening_hours(place: str, db: Session = Depends(get_db)):
     return crud.get_opening_hours(place, db)
 
 
-@router.get('/{place}/opening_hours/{day}/{timeslot}', response_model=List[schemas.OpeningHours])
+@router.get('/{place}/opening_hours/{day}/{timeslot}',
+            response_model=List[schemas.OpeningHours])
 async def get_timeslot(place: str, day: int, timeslot: bool, db: Session = Depends(get_db)):
     return crud.get_timeslot(place, day, timeslot, db)
 
diff --git a/backend/routers/stats.py b/backend/routers/stats.py
index f1790284de0a5a738fbd47ec5d6de23366d58072..b0ac7500dc22466fb766f27a598dc25eb66bb500 100644
--- a/backend/routers/stats.py
+++ b/backend/routers/stats.py
@@ -24,12 +24,14 @@ async def stats(place: str, db: Session = Depends(get_db)):
     return crud.get_current_graph(place, db)
 
 
-@router.get('/{place}/opening_hours', response_model=List[schemas.OpeningHours])
+@router.get('/{place}/opening_hours',
+            response_model=List[schemas.OpeningHours])
 async def get_opening_hours(place: str, db: Session = Depends(get_db)):
     return crud.get_opening_hours(place, db)
 
 
-@router.get('/{place}/opening_hours/{day}/{timeslot}', response_model=List[schemas.OpeningHours])
+@router.get('/{place}/opening_hours/{day}/{timeslot}',
+            response_model=List[schemas.OpeningHours])
 async def get_timeslot(place: str, day: int, timeslot: bool, db: Session = Depends(get_db)):
     return crud.get_timeslot(place, day, timeslot, db)
 
diff --git a/backend/video-capture.py b/backend/video-capture.py
deleted file mode 100644
index c9086a827f4f4e27081ebbc47c73caccab4e972f..0000000000000000000000000000000000000000
--- a/backend/video-capture.py
+++ /dev/null
@@ -1,50 +0,0 @@
-import cv2
-from datetime import datetime, timedelta
-import numpy as np
-import keras
-from utils.preprocessing import fix_singular_shape, norm_by_imagenet
-from db import models
-
-from db.database import SessionLocal
-
-db = SessionLocal()
-model = keras.models.load_model('assets')
-
-frame_gap = 450
-
-cameras = [{
-    "place": "local",
-    "IP": "10.148.38.9",
-    "user": "viarezocam",
-    "password": "superponey",
-    "stream": "stream1",
-    "framegap": 900,  # 60 * camera frequency
-    "count": 0,  # mandatory
-    "cap": None
-}]
-
-for camera in cameras:
-    camera.cap = cv2.VideoCapture(f"rtsp://{camera.user}:{camera.password}@{camera.ip}/{camera.stream}")
-
-while True:
-    for camera in cameras:
-        if camera.cap.isOpened():
-            ret, frame = camera.cap.read()
-            if ret and camera.count % camera.frame_gap == 0:
-                current_time = datetime.now()
-                treated_img = fix_singular_shape(frame, 16)
-                input_image = np.expand_dims(np.squeeze(norm_by_imagenet(np.array([treated_img]))), axis=0)
-                pred_map = np.squeeze(model.predict(input_image))
-                count_prediction = np.sum(pred_map)
-                waiting_time = timedelta(seconds=120 + count_prediction * 30)
-                record = {"place": camera.place,
-                          "date": current_time,
-                          "density": count_prediction,
-                          "waiting_time": waiting_time}
-                db_record = models.Records(**record)
-                db.add(db_record)
-                db.commit()
-                db.refresh(db_record)
-            camera.count += 1
-        else:
-            camera.cap.release()
diff --git a/backend/video_capture.py b/backend/video_capture.py
new file mode 100644
index 0000000000000000000000000000000000000000..96afe5a8d8795b13600286463e4add0348025c4f
--- /dev/null
+++ b/backend/video_capture.py
@@ -0,0 +1,80 @@
+import cv2
+from datetime import datetime, timedelta
+import numpy as np
+import keras
+from utils.preprocessing import fix_singular_shape, norm_by_imagenet
+from db import models
+from dotenv import load_dotenv
+import os
+
+from db.database import SessionLocal
+
+
+def handle_cameras():
+    model = keras.models.load_model('assets', compile=False)
+    db = SessionLocal()
+    load_dotenv()
+    camera_number = int(os.getenv('CAM_NUMBER'))
+    cameras = []
+    # Build one camera configuration per CAM_<i>_* block defined in the .env file
+    for i in range(camera_number):
+        camera = {}
+        camera["place"] = os.getenv(f'CAM_{i}_PLACE')
+        camera["IP"] = os.getenv(f'CAM_{i}_IP')
+        camera["user"] = os.getenv(f'CAM_{i}_USER')
+        camera["password"] = os.getenv(f'CAM_{i}_PASSWORD')
+        camera["stream"] = os.getenv(f'CAM_{i}_STREAM')
+        camera["a_factor"] = int(os.getenv(f'CAM_{i}_A_FACTOR'))
+        camera["b_factor"] = int(os.getenv(f'CAM_{i}_B_FACTOR'))
+        camera["framegap"] = int(os.getenv(f'CAM_{i}_FRAMEGAP'))
+        camera["count"] = 0
+        camera["cap"] = cv2.VideoCapture(
+            f'rtsp://{camera["user"]}:{camera["password"]}@{camera["IP"]}/{camera["stream"]}')
+        mask_length = int(os.getenv(f'CAM_{i}_POINTS_NB'))
+        mask_points = []
+        for j in range(mask_length):
+            point = os.getenv(f'CAM_{i}_POINT_{j}')
+            mask_points.append(list(map(int, point.split(','))))
+        # Polygon mask keeping only the zone of the frame that should be analyzed
+        mask = np.zeros((720, 1280, 3), dtype=np.float32)
+        cv2.fillPoly(mask, [np.array(mask_points)], (255, 255, 255))
+        camera["mask"] = mask
+        cameras.append(camera)
+
+    while True:
+        for camera in cameras:
+            if camera['cap'].isOpened():
+                ret, frame = camera['cap'].read()
+                if ret and camera['count'] % camera['framegap'] == 0:
+                    current_time = datetime.now()
+                    masked_img = cv2.bitwise_and(
+                        frame.astype(np.float32), camera["mask"])
+                    treated_img = fix_singular_shape(masked_img, 16)
+                    input_image = np.expand_dims(
+                        np.squeeze(
+                            norm_by_imagenet(
+                                np.array(
+                                    [treated_img]))),
+                        axis=0)
+                    pred_map = np.squeeze(model.predict(input_image))
+                    count_prediction = np.sum(pred_map)
+                    print(count_prediction)
+                    # Waiting time estimate: B_FACTOR + A_FACTOR seconds per detected person
+                    waiting_time = timedelta(
+                        seconds=camera['b_factor'] +
+                        int(count_prediction) *
+                        camera['a_factor'])
+                    db_record = models.Records(
+                        place=camera['place'],
+                        date=current_time,
+                        density=int(count_prediction),
+                        waiting_time=waiting_time)
+                    db.add(db_record)
+                    db.commit()
+                    db.refresh(db_record)
+                camera['count'] += 1
+            else:
+                # The stream was lost: try to reopen the RTSP connection
+                camera["cap"] = cv2.VideoCapture(
+                    f"rtsp://{camera['user']}:{camera['password']}@{camera['IP']}/{camera['stream']}")
+                print("attempting to reconnect")
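For reference, handle_cameras() reads CAM_NUMBER and then one CAM_<i>_* block per camera, so a second camera could be declared in backend/.env by raising CAM_NUMBER and adding a CAM_1_* block that mirrors the CAM_0_* keys above. The place, host, credentials and polygon in this sketch are placeholders, not values taken from the repository:

CAM_NUMBER=2

CAM_1_PLACE=other
CAM_1_IP=192.0.2.10
CAM_1_USER=camuser
CAM_1_PASSWORD=campassword
CAM_1_STREAM=stream1
CAM_1_A_FACTOR=30
CAM_1_B_FACTOR=120
CAM_1_FRAMEGAP=150
CAM_1_POINTS_NB=3
CAM_1_POINT_0=0, 0
CAM_1_POINT_1=1280, 0
CAM_1_POINT_2=1280, 720

Each CAM_<i>_POINT_<j> is an "x, y" vertex, in 1280x720 pixel coordinates, of the polygon that masks the frame before inference, and the stored waiting time is B_FACTOR + A_FACTOR * predicted headcount, in seconds: with A_FACTOR=30 and B_FACTOR=120, ten detected people give 120 + 10 * 30 = 420 s, i.e. 7 minutes.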
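Polygon coordinates are easy to get wrong, so a small standalone script along these lines (an assumed helper, not a file in this patch) can rebuild the CAM_0 mask the same way handle_cameras() does and save it as an image to check that the white area covers the queue:

import os

import cv2
import numpy as np
from dotenv import load_dotenv

load_dotenv()

# Rebuild the CAM_0 polygon from the same .env keys video_capture.handle_cameras() reads.
points = [
    list(map(int, os.getenv(f'CAM_0_POINT_{j}').split(',')))
    for j in range(int(os.getenv('CAM_0_POINTS_NB')))
]
mask = np.zeros((720, 1280, 3), dtype=np.float32)
cv2.fillPoly(mask, [np.array(points, dtype=np.int32)], (255, 255, 255))

# White pixels are kept for the density model, black pixels are discarded.
cv2.imwrite('cam_0_mask.png', mask.astype(np.uint8))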