-rw-r--r-- | docker/docker-compose.yaml | 15
-rw-r--r-- | docker/postgres/Dockerfile | 5
-rw-r--r-- | docker/postgres/schema.sql | 99
-rw-r--r-- | env-vars.sh | 3
-rw-r--r-- | requirements.txt | 6
-rw-r--r-- | src/db/couch/__init__.py (renamed from src/couch/__init__.py) | 2
-rw-r--r-- | src/db/couch/client.py (renamed from src/couch/client.py) | 14
-rw-r--r-- | src/db/couch/exceptions.py (renamed from src/couch/exceptions.py) | 0
-rw-r--r-- | src/db/couch/feedreader.py (renamed from src/couch/feedreader.py) | 0
-rw-r--r-- | src/db/couch/resource.py (renamed from src/couch/resource.py) | 5
-rw-r--r-- | src/db/couch/utils.py (renamed from src/couch/utils.py) | 0
-rwxr-xr-x | src/db/dictionary.py (renamed from src/db.py) | 4
-rw-r--r-- | src/db/index.py | 61
-rw-r--r-- | src/db/schema.py (renamed from src/schema.py) | 0
-rw-r--r-- | src/db/sql.py | 170
-rwxr-xr-x | src/main.py | 48
16 files changed, 405 insertions, 27 deletions
diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml
index 23a543b..ccd957d 100644
--- a/docker/docker-compose.yaml
+++ b/docker/docker-compose.yaml
@@ -34,7 +34,22 @@ services:
         source: couchdb-data
         target: /opt/couchdb/data
+  postgres:
+    build: ./postgres
+    volumes:
+      - type: volume
+        source: postgres-data
+        target: /var/lib/postgresql/data/
+    environment:
+      - POSTGRES_USER
+      - POSTGRES_PASSWORD
+      - POSTGRES_DB
+    ports:
+      - 5432:5432
+
 volumes:
   couchdb-data:
     external: false
+  postgres-data:
+    external: false
 
 certs:
diff --git a/docker/postgres/Dockerfile b/docker/postgres/Dockerfile
new file mode 100644
index 0000000..1699f02
--- /dev/null
+++ b/docker/postgres/Dockerfile
@@ -0,0 +1,5 @@
+FROM postgres:14
+
+COPY schema.sql /docker-entrypoint-initdb.d/
+
+EXPOSE 5432
diff --git a/docker/postgres/schema.sql b/docker/postgres/schema.sql
new file mode 100644
index 0000000..c6e1323
--- /dev/null
+++ b/docker/postgres/schema.sql
@@ -0,0 +1,99 @@
+--
+-- PostgreSQL database dump
+--
+
+-- Dumped from database version 14.2 (Debian 14.2-1.pgdg110+1)
+-- Dumped by pg_dump version 14.2 (Debian 14.2-1.pgdg110+1)
+
+SET statement_timeout = 0;
+SET lock_timeout = 0;
+SET idle_in_transaction_session_timeout = 0;
+SET client_encoding = 'UTF8';
+SET standard_conforming_strings = on;
+SELECT pg_catalog.set_config('search_path', '', false);
+SET check_function_bodies = false;
+SET xmloption = content;
+SET client_min_messages = warning;
+SET row_security = off;
+
+SET default_tablespace = '';
+
+SET default_table_access_method = heap;
+
+--
+-- Name: log; Type: TABLE; Schema: public; Owner: test
+--
+
+CREATE TABLE public.log (
+    id SERIAL,
+    "timestamp" date NOT NULL,
+    username text NOT NULL,
+    logtext text
+);
+
+
+ALTER TABLE public.log OWNER TO test;
+
+--
+-- Name: scanner; Type: TABLE; Schema: public; Owner: test
+--
+
+CREATE TABLE public.scanner (
+    id SERIAL,
+    runner text DEFAULT '*',
+    name character varying(128) NOT NULL,
+    active boolean NOT NULL,
+    "interval" integer DEFAULT 300 NOT NULL,
+    starttime date,
+    endtime date,
+    maxruns integer DEFAULT 1,
+    hostname character varying(128) NOT NULL,
+    port integer NOT NULL
+);
+
+
+ALTER TABLE public.scanner OWNER TO test;
+
+--
+-- Data for Name: log; Type: TABLE DATA; Schema: public; Owner: test
+--
+
+COPY public.log (id, "timestamp", username, logtext) FROM stdin;
+\.
+
+
+--
+-- Data for Name: scanner; Type: TABLE DATA; Schema: public; Owner: test
+--
+
+COPY public.scanner (id, name, active, "interval", starttime, endtime, maxruns) FROM stdin;
+\.
+
+
+--
+-- Name: log log_pkey; Type: CONSTRAINT; Schema: public; Owner: test
+--
+
+ALTER TABLE ONLY public.log
+    ADD CONSTRAINT log_pkey PRIMARY KEY (id);
+
+
+--
+-- Name: scanner scanner_name_key; Type: CONSTRAINT; Schema: public; Owner: test
+--
+
+ALTER TABLE ONLY public.scanner
+    ADD CONSTRAINT scanner_name_key UNIQUE (name);
+
+
+--
+-- Name: scanner scanner_pkey; Type: CONSTRAINT; Schema: public; Owner: test
+--
+
+ALTER TABLE ONLY public.scanner
+    ADD CONSTRAINT scanner_pkey PRIMARY KEY (id);
+
+
+--
+-- PostgreSQL database dump complete
+--
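As a quick aside, a minimal smoke test (not part of this commit) for the init script above could look like the sketch below. It assumes the postgres service from docker/docker-compose.yaml is up with port 5432 published on localhost, that POSTGRES_USER, POSTGRES_PASSWORD and POSTGRES_DB are exported in the shell, and it uses psycopg2-binary, which this diff adds to requirements.txt further down.

# Illustrative smoke test only, not part of the commit: confirm that
# docker-entrypoint-initdb.d ran schema.sql and created the expected tables.
# Assumes the compose postgres service is reachable on localhost:5432 and the
# POSTGRES_* variables are exported in the current shell.
import os

import psycopg2

conn = psycopg2.connect(
    host="localhost",
    port=5432,
    user=os.environ["POSTGRES_USER"],
    password=os.environ["POSTGRES_PASSWORD"],
    dbname=os.environ["POSTGRES_DB"],
)
with conn, conn.cursor() as cur:
    cur.execute(
        "SELECT table_name FROM information_schema.tables "
        "WHERE table_schema = 'public'"
    )
    print(sorted(row[0] for row in cur.fetchall()))
conn.close()

If the init script ran cleanly, the printed list should contain both the log and scanner tables.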
diff --git a/env-vars.sh b/env-vars.sh
index 8361a2f..b5540f1 100644
--- a/env-vars.sh
+++ b/env-vars.sh
@@ -1,6 +1,7 @@
 export COUCHDB_USER=test
 export COUCHDB_PASSWORD=test
 export COUCHDB_NAME=test
-export COUCHDB_HOSTNAME=couchdb
+export COUCHDB_HOSTNAME=localhost
 export DOCKER_JWT_PUBKEY_PATH=/tmp/soc_collector/
 export DOCKER_JWT_HTPASSWD_PATH=/tmp/soc_collector_htpasswd/
+export JWT_PUBKEY_PATH=/tmp/public.pem
diff --git a/requirements.txt b/requirements.txt
index ce2f921..c50a7e0 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -10,18 +10,24 @@ fastapi==0.70.0
 fastapi-jwt-auth==0.5.0
 h11==0.12.0
 idna==3.3
+inflect==5.5.2
 iniconfig==1.1.1
+jsonschema==4.4.0
 nose==1.3.7
 packaging==21.3
 pluggy==1.0.0
+psycopg2-binary==2.9.3
 py==1.11.0
 pycparser==2.20
 pydantic==1.8.2
 PyJWT==1.7.1
 pyparsing==3.0.6
+pyrsistent==0.18.1
 pytest==6.2.5
 requests==2.26.0
 sniffio==1.2.0
+sqlacodegen==2.3.0
+SQLAlchemy==1.4.35
 starlette==0.16.0
 toml==0.10.2
 typing-extensions==3.10.0.2
diff --git a/src/couch/__init__.py b/src/db/couch/__init__.py
index a7537bc..b099235 100644
--- a/src/couch/__init__.py
+++ b/src/db/couch/__init__.py
@@ -8,4 +8,4 @@ __email__ = "rinat.sabitov@gmail.com"
 __status__ = "Development"
 
 
-from couch.client import Server  # noqa: F401
+from db.couch.client import Server  # noqa: F401
diff --git a/src/couch/client.py b/src/db/couch/client.py
index 188e0de..73d85a1 100644
--- a/src/couch/client.py
+++ b/src/db/couch/client.py
@@ -1,18 +1,16 @@
 # -*- coding: utf-8 -*-
 
 # Based on py-couchdb (https://github.com/histrio/py-couchdb)
 
-import os
-import json
-import uuid
 import copy
+import json
 import mimetypes
+import os
+import uuid
 import warnings
 
-from couch import utils
-from couch import feedreader
-from couch import exceptions as exp
-from couch.resource import Resource
-
+from db.couch import exceptions as exp
+from db.couch import feedreader, utils
+from db.couch.resource import Resource
 
 DEFAULT_BASE_URL = os.environ.get('COUCHDB_URL', 'http://localhost:5984/')
diff --git a/src/couch/exceptions.py b/src/db/couch/exceptions.py
index d7e037b..d7e037b 100644
--- a/src/couch/exceptions.py
+++ b/src/db/couch/exceptions.py
diff --git a/src/couch/feedreader.py b/src/db/couch/feedreader.py
index e293932..e293932 100644
--- a/src/couch/feedreader.py
+++ b/src/db/couch/feedreader.py
diff --git a/src/couch/resource.py b/src/db/couch/resource.py
index da1e0dd..8ff883b 100644
--- a/src/couch/resource.py
+++ b/src/db/couch/resource.py
@@ -5,10 +5,9 @@ from __future__ import unicode_literals
 
 import json
 
-import requests
 
-from couch import utils
-from couch import exceptions
+import requests
+from db.couch import exceptions, utils
 
 
 class Resource(object):
diff --git a/src/couch/utils.py b/src/db/couch/utils.py
index 1cd21d8..1cd21d8 100644
--- a/src/couch/utils.py
+++ b/src/db/couch/utils.py
diff --git a/src/db.py b/src/db/dictionary.py
index 6f25ec3..f0f5fe9 100755
--- a/src/db.py
+++ b/src/db/dictionary.py
@@ -11,8 +11,8 @@ import os
 import sys
 import time
 
-import couch
-from schema import as_index_list, validate_collector_data
+from db import couch
+from db.schema import as_index_list, validate_collector_data
 
 
 class DictDB():
diff --git a/src/db/index.py b/src/db/index.py
new file mode 100644
index 0000000..688ceeb
--- /dev/null
+++ b/src/db/index.py
@@ -0,0 +1,61 @@
+from pydantic import BaseSettings
+
+
+class CouchIindex(BaseSettings):
+    domain: dict = {
+        "index": {
+            "fields": [
+                "domain",
+            ]
+        },
+        "name": "domain-json-index",
+        "type": "json"
+    }
+    ip: dict = {
+        "index": {
+            "fields": [
"domain", + "ip" + ] + }, + "name": "ip-json-index", + "type": "json" + } + port: dict = { + "index": { + "fields": [ + "domain", + "port" + ] + }, + "name": "port-json-index", + "type": "json" + } + asn: dict = { + "index": { + "fields": [ + "domain", + "asn" + ] + }, + "name": "asn-json-index", + "type": "json" + } + asn_country_code: dict = { + "index": { + "fields": [ + "domain", + "asn_country_code" + ] + }, + "name": "asn-country-code-json-index", + "type": "json" + } + + +def as_list(): + index_list = list() + for item in CouchIindex().dict(): + index_list.append(CouchIindex().dict()[item]) + + return index_list diff --git a/src/schema.py b/src/db/schema.py index 9bdf130..9bdf130 100644 --- a/src/schema.py +++ b/src/db/schema.py diff --git a/src/db/sql.py b/src/db/sql.py new file mode 100644 index 0000000..c47a69c --- /dev/null +++ b/src/db/sql.py @@ -0,0 +1,170 @@ +import datetime +import os +import sys +from contextlib import contextmanager + +from sqlalchemy import (Boolean, Column, Date, Integer, String, Text, + create_engine, text) +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import sessionmaker + +Base = declarative_base() +metadata = Base.metadata + + +class Log(Base): + __tablename__ = "log" + + id = Column(Integer, primary_key=True) + timestamp = Column(Date, nullable=False, + default=datetime.datetime.utcnow) + username = Column(Text, nullable=False) + logtext = Column(Text, nullable=False) + + def as_dict(self): + """Return JSON serializable dict.""" + d = {} + for col in self.__table__.columns: + value = getattr(self, col.name) + if issubclass(value.__class__, Base): + continue + elif issubclass(value.__class__, datetime.datetime): + value = str(value) + d[col.name] = value + return d + + @classmethod + def add(cls, username, logtext): + with sqla_session() as session: + logentry = Log() + logentry.username = username + logentry.logtext = logtext + session.add(logentry) + + +class Scanner(Base): + __tablename__ = 'scanner' + + id = Column(Integer, primary_key=True, server_default=text( + "nextval('scanner_id_seq'::regclass)")) + runner = Column(Text, server_default=text("'*'::text")) + name = Column(String(128), nullable=False) + active = Column(Boolean, nullable=False) + interval = Column(Integer, nullable=False, + server_default=text("300")) + starttime = Column(Date) + endtime = Column(Date) + maxruns = Column(Integer, server_default=text("1")) + hostname = Column(String(128), nullable=False) + port = Column(Integer, nullable=False) + + def as_dict(self): + d = {} + for col in self.__table__.columns: + value = getattr(self, col.name) + if issubclass(value.__class__, Base): + continue + elif issubclass(value.__class__, datetime.datetime): + value = str(value) + d[col.name] = value + return d + + @classmethod + def add(cls, name, hostname, port, active=False, interval=0, + starttime=None, + endtime=None, + maxruns=1): + errors = list() + if starttime and endtime: + if starttime > endtime: + errors.append("Endtime must be after the starttime.") + if interval < 0: + errors.append("Interval must be > 0") + if maxruns < 0: + errors.append("Max runs must be > 0") + with sqla_session() as session: + scanentry = Scanner() + scanentry.name = name + scanentry.active = active + scanentry.interval = interval + if starttime: + scanentry.starttime = starttime + if endtime: + scanentry.endtime = endtime + scanentry.maxruns = maxruns + scanentry.hostname = hostname + scanentry.port = port + session.add(scanentry) + return errors + + @classmethod + def 
+        results = list()
+        with sqla_session() as session:
+            scanners = session.query(Scanner).all()
+            if not scanners:
+                return None
+            for scanner in scanners:
+                if scanner.runner == "*":
+                    results.append(scanner.as_dict())
+                elif scanner.runner == name:
+                    results.append(scanner.as_dict())
+        return results
+
+    @classmethod
+    def edit(cls, name, active):
+        with sqla_session() as session:
+            scanners = session.query(Scanner).filter(
+                Scanner.name == name).all()
+            if not scanners:
+                return None
+            for scanner in scanners:
+                scanner.active = active
+        return True
+
+
+def get_sqlalchemy_conn_str(**kwargs) -> str:
+    try:
+        if "SQL_HOSTNAME" in os.environ:
+            hostname = os.environ["SQL_HOSTNAME"]
+        else:
+            hostname = "localhost"
+            print("SQL_HOSTNAME not set, falling back to localhost.")
+        if "SQL_PORT" in os.environ:
+            port = os.environ["SQL_PORT"]
+        else:
+            print("SQL_PORT not set, falling back to 5432.")
+            port = 5432
+        username = os.environ["SQL_USERNAME"]
+        password = os.environ["SQL_PASSWORD"]
+        database = os.environ["SQL_DATABASE"]
+    except KeyError:
+        print("SQL_DATABASE, SQL_USERNAME, SQL_PASSWORD must be set.")
+        sys.exit(-2)
+
+    return (
+        f"postgresql://{username}:{password}@{hostname}:{port}/{database}"
+    )
+
+
+def get_session(conn_str=""):
+    if conn_str == "":
+        conn_str = get_sqlalchemy_conn_str()
+
+    engine = create_engine(conn_str, pool_size=50, max_overflow=0)
+    Session = sessionmaker(bind=engine)
+
+    return Session()
+
+
+@contextmanager
+def sqla_session(conn_str="", **kwargs):
+    session = get_session(conn_str)
+    try:
+        yield session
+        session.commit()
+    except Exception:
+        session.rollback()
+        raise
+    finally:
+        session.close()
diff --git a/src/main.py b/src/main.py
index 9de8eb8..a62d77c 100755
--- a/src/main.py
+++ b/src/main.py
@@ -11,14 +11,14 @@ from fastapi_jwt_auth import AuthJWT
 from fastapi_jwt_auth.exceptions import AuthJWTException
 from pydantic import BaseModel
 
-from db import DictDB
-from schema import get_index_keys, validate_collector_data
+from db.dictionary import DictDB
+from db.schema import get_index_keys
 
 app = FastAPI()
 
 app.add_middleware(
     CORSMiddleware,
-    allow_origins=["http://localhost:8001"],
+    allow_origins=["http://localhost:8000"],
     allow_credentials=True,
     allow_methods=["*"],
     allow_headers=["*"],
@@ -37,9 +37,8 @@ async def mock_x_total_count_header(request: Request, call_next):
     for i in range(10):
         try:
             db = DictDB()
-        except Exception:
-            print(
-                f'Database not responding, will try again soon. Attempt {i + 1} of 10.')
+        except Exception as e:
+            print(f"Database not responding, will try again soon: {e}")
         else:
             break
         time.sleep(1)
@@ -90,25 +89,25 @@ class JWTConfig(BaseModel):
     authjwt_public_key: str = get_pubkey()
 
 
-@AuthJWT.load_config
+@ AuthJWT.load_config
 def jwt_config():
     return JWTConfig()
 
 
-@app.exception_handler(AuthJWTException)
+@ app.exception_handler(AuthJWTException)
 def authjwt_exception_handler(request: Request, exc: AuthJWTException):
     return JSONResponse(content={"status": "error", "message": exc.message},
                         status_code=400)
 
 
-@app.exception_handler(RuntimeError)
+@ app.exception_handler(RuntimeError)
 def app_exception_handler(request: Request, exc: RuntimeError):
     return JSONResponse(content={"status": "error",
                                  "message": str(exc.with_traceback(None))},
                         status_code=400)
 
 
-@app.get('/sc/v0/get')
+@ app.get('/sc/v0/get')
 async def get(key=None, limit=25, skip=0, ip=None, port=None,
               asn=None, Authorize: AuthJWT = Depends()):
 
@@ -134,7 +133,7 @@ async def get(key=None, limit=25, skip=0, ip=None, port=None,
     return JSONResponse(content={"status": "success", "docs": data})
 
 
-@app.get('/sc/v0/get/{key}')
+@ app.get('/sc/v0/get/{key}')
 async def get_key(key=None, Authorize: AuthJWT = Depends()):
     Authorize.jwt_required()
 
@@ -195,7 +194,7 @@ async def add(data: Request, Authorize: AuthJWT = Depends()):
     return JSONResponse(content={"status": "success", "docs": key})
 
 
-@app.delete('/sc/v0/delete/{key}')
+@ app.delete('/sc/v0/delete/{key}')
 async def delete(key, Authorize: AuthJWT = Depends()):
     Authorize.jwt_required()
 
@@ -232,6 +231,31 @@ async def delete(key, Authorize: AuthJWT = Depends()):
     return JSONResponse(content={"status": "success", "docs": data})
 
 
+@ app.get("/sc/v0/scanner/{name}")
+async def scanner_get(name, data: Request, Authorize: AuthJWT = Depends()):
+    Authorize.jwt_required()
+
+    scanners = Scanner.get(name)
+
+    return JSONResponse(content={"status": "success", "data": scanners})
+
+
+@ app.put("/sc/v0/scanner/{name}")
+async def scanner_put(name, data: Request, Authorize: AuthJWT = Depends()):
+    errors = None
+    Authorize.jwt_required()
+
+    json_data = await data.json()
+
+    if "active" in json_data and isinstance(json_data["active"], bool):
+        errors = Scanner.edit(name, json_data["active"])
+
+    if errors:
+        return JSONResponse(content={"status": "error", "message": "\n".join(errors)}, status_code=400)
+
+    return JSONResponse(content={"status": "success", "data": Scanner.get(name)}, status_code=200)
+
+
 def main(standalone=False):
     if not standalone:
         return app
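As a rough usage sketch (not part of the commit), the ORM helpers introduced in src/db/sql.py could be exercised as below. It assumes the script runs with src/ on PYTHONPATH, that SQL_USERNAME, SQL_PASSWORD and SQL_DATABASE (and optionally SQL_HOSTNAME/SQL_PORT) point at the postgres service defined above, and that the scanner name, hostname and runner values are placeholders.

# Illustrative sketch only: exercise the new helpers from src/db/sql.py.
# Assumes the SQL_* environment variables are set and the postgres container
# is reachable; "nmap-lab", "scanner01.example.com" and "worker1" are
# made-up placeholder values.
from db.sql import Log, Scanner

# Register a scanner; add() returns a list of validation errors (empty on success).
errors = Scanner.add(name="nmap-lab", hostname="scanner01.example.com",
                     port=443, active=True, interval=600)
if errors:
    raise SystemExit("; ".join(errors))

# Fetch scanners assigned to runner "worker1" (or the "*" wildcard default).
for scanner in Scanner.get("worker1"):
    print(scanner["name"], scanner["hostname"], scanner["active"])

# Write an audit entry to the log table.
Log.add(username="worker1", logtext="scanner sync completed")

The same Scanner.get()/Scanner.edit() pair backs the new /sc/v0/scanner/{name} GET and PUT endpoints in src/main.py, so this mirrors what a runner would see through the HTTP API.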