Compare commits

...

5 Commits

Author               SHA1        Message                                 Date
Chandrapal Badshah   430939c02e  Update lighthouse serializer            2025-06-03 11:31:30 +05:30
Chandrapal Badshah   a9ed8d1fd0  Fix view tests                          2025-06-02 21:02:12 +05:30
Chandrapal Badshah   0689326c28  Fix Lighthouse PR review comments       2025-06-02 20:31:10 +05:30
Chandrapal Badshah   f4febf9e08  Fix regex check in creation of config   2025-05-27 16:28:57 +05:30
Chandrapal Badshah   57acb43251  Add Lighthouse django endpoints         2025-05-27 16:28:57 +05:30
10 changed files with 951 additions and 4 deletions

api/poetry.lock generated

@@ -1448,6 +1448,18 @@ files = [
graph = ["objgraph (>=1.7.2)"]
profile = ["gprof2dot (>=2022.7.29)"]
[[package]]
name = "distro"
version = "1.9.0"
description = "Distro - an OS platform information API"
optional = false
python-versions = ">=3.6"
groups = ["main"]
files = [
{file = "distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"},
{file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"},
]
[[package]]
name = "dj-rest-auth"
version = "7.0.1"
@@ -2470,6 +2482,93 @@ MarkupSafe = ">=2.0"
[package.extras]
i18n = ["Babel (>=2.7)"]
[[package]]
name = "jiter"
version = "0.10.0"
description = "Fast iterable JSON parser."
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "jiter-0.10.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:cd2fb72b02478f06a900a5782de2ef47e0396b3e1f7d5aba30daeb1fce66f303"},
{file = "jiter-0.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:32bb468e3af278f095d3fa5b90314728a6916d89ba3d0ffb726dd9bf7367285e"},
{file = "jiter-0.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa8b3e0068c26ddedc7abc6fac37da2d0af16b921e288a5a613f4b86f050354f"},
{file = "jiter-0.10.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:286299b74cc49e25cd42eea19b72aa82c515d2f2ee12d11392c56d8701f52224"},
{file = "jiter-0.10.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6ed5649ceeaeffc28d87fb012d25a4cd356dcd53eff5acff1f0466b831dda2a7"},
{file = "jiter-0.10.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2ab0051160cb758a70716448908ef14ad476c3774bd03ddce075f3c1f90a3d6"},
{file = "jiter-0.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03997d2f37f6b67d2f5c475da4412be584e1cec273c1cfc03d642c46db43f8cf"},
{file = "jiter-0.10.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c404a99352d839fed80d6afd6c1d66071f3bacaaa5c4268983fc10f769112e90"},
{file = "jiter-0.10.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:66e989410b6666d3ddb27a74c7e50d0829704ede652fd4c858e91f8d64b403d0"},
{file = "jiter-0.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b532d3af9ef4f6374609a3bcb5e05a1951d3bf6190dc6b176fdb277c9bbf15ee"},
{file = "jiter-0.10.0-cp310-cp310-win32.whl", hash = "sha256:da9be20b333970e28b72edc4dff63d4fec3398e05770fb3205f7fb460eb48dd4"},
{file = "jiter-0.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:f59e533afed0c5b0ac3eba20d2548c4a550336d8282ee69eb07b37ea526ee4e5"},
{file = "jiter-0.10.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:3bebe0c558e19902c96e99217e0b8e8b17d570906e72ed8a87170bc290b1e978"},
{file = "jiter-0.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:558cc7e44fd8e507a236bee6a02fa17199ba752874400a0ca6cd6e2196cdb7dc"},
{file = "jiter-0.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d613e4b379a07d7c8453c5712ce7014e86c6ac93d990a0b8e7377e18505e98d"},
{file = "jiter-0.10.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f62cf8ba0618eda841b9bf61797f21c5ebd15a7a1e19daab76e4e4b498d515b2"},
{file = "jiter-0.10.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:919d139cdfa8ae8945112398511cb7fca58a77382617d279556b344867a37e61"},
{file = "jiter-0.10.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13ddbc6ae311175a3b03bd8994881bc4635c923754932918e18da841632349db"},
{file = "jiter-0.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c440ea003ad10927a30521a9062ce10b5479592e8a70da27f21eeb457b4a9c5"},
{file = "jiter-0.10.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dc347c87944983481e138dea467c0551080c86b9d21de6ea9306efb12ca8f606"},
{file = "jiter-0.10.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:13252b58c1f4d8c5b63ab103c03d909e8e1e7842d302473f482915d95fefd605"},
{file = "jiter-0.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7d1bbf3c465de4a24ab12fb7766a0003f6f9bce48b8b6a886158c4d569452dc5"},
{file = "jiter-0.10.0-cp311-cp311-win32.whl", hash = "sha256:db16e4848b7e826edca4ccdd5b145939758dadf0dc06e7007ad0e9cfb5928ae7"},
{file = "jiter-0.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:9c9c1d5f10e18909e993f9641f12fe1c77b3e9b533ee94ffa970acc14ded3812"},
{file = "jiter-0.10.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1e274728e4a5345a6dde2d343c8da018b9d4bd4350f5a472fa91f66fda44911b"},
{file = "jiter-0.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7202ae396446c988cb2a5feb33a543ab2165b786ac97f53b59aafb803fef0744"},
{file = "jiter-0.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23ba7722d6748b6920ed02a8f1726fb4b33e0fd2f3f621816a8b486c66410ab2"},
{file = "jiter-0.10.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:371eab43c0a288537d30e1f0b193bc4eca90439fc08a022dd83e5e07500ed026"},
{file = "jiter-0.10.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6c675736059020365cebc845a820214765162728b51ab1e03a1b7b3abb70f74c"},
{file = "jiter-0.10.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0c5867d40ab716e4684858e4887489685968a47e3ba222e44cde6e4a2154f959"},
{file = "jiter-0.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:395bb9a26111b60141757d874d27fdea01b17e8fac958b91c20128ba8f4acc8a"},
{file = "jiter-0.10.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6842184aed5cdb07e0c7e20e5bdcfafe33515ee1741a6835353bb45fe5d1bd95"},
{file = "jiter-0.10.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:62755d1bcea9876770d4df713d82606c8c1a3dca88ff39046b85a048566d56ea"},
{file = "jiter-0.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:533efbce2cacec78d5ba73a41756beff8431dfa1694b6346ce7af3a12c42202b"},
{file = "jiter-0.10.0-cp312-cp312-win32.whl", hash = "sha256:8be921f0cadd245e981b964dfbcd6fd4bc4e254cdc069490416dd7a2632ecc01"},
{file = "jiter-0.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:a7c7d785ae9dda68c2678532a5a1581347e9c15362ae9f6e68f3fdbfb64f2e49"},
{file = "jiter-0.10.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:e0588107ec8e11b6f5ef0e0d656fb2803ac6cf94a96b2b9fc675c0e3ab5e8644"},
{file = "jiter-0.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cafc4628b616dc32530c20ee53d71589816cf385dd9449633e910d596b1f5c8a"},
{file = "jiter-0.10.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:520ef6d981172693786a49ff5b09eda72a42e539f14788124a07530f785c3ad6"},
{file = "jiter-0.10.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:554dedfd05937f8fc45d17ebdf298fe7e0c77458232bcb73d9fbbf4c6455f5b3"},
{file = "jiter-0.10.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5bc299da7789deacf95f64052d97f75c16d4fc8c4c214a22bf8d859a4288a1c2"},
{file = "jiter-0.10.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5161e201172de298a8a1baad95eb85db4fb90e902353b1f6a41d64ea64644e25"},
{file = "jiter-0.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e2227db6ba93cb3e2bf67c87e594adde0609f146344e8207e8730364db27041"},
{file = "jiter-0.10.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:15acb267ea5e2c64515574b06a8bf393fbfee6a50eb1673614aa45f4613c0cca"},
{file = "jiter-0.10.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:901b92f2e2947dc6dfcb52fd624453862e16665ea909a08398dde19c0731b7f4"},
{file = "jiter-0.10.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:d0cb9a125d5a3ec971a094a845eadde2db0de85b33c9f13eb94a0c63d463879e"},
{file = "jiter-0.10.0-cp313-cp313-win32.whl", hash = "sha256:48a403277ad1ee208fb930bdf91745e4d2d6e47253eedc96e2559d1e6527006d"},
{file = "jiter-0.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:75f9eb72ecb640619c29bf714e78c9c46c9c4eaafd644bf78577ede459f330d4"},
{file = "jiter-0.10.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:28ed2a4c05a1f32ef0e1d24c2611330219fed727dae01789f4a335617634b1ca"},
{file = "jiter-0.10.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14a4c418b1ec86a195f1ca69da8b23e8926c752b685af665ce30777233dfe070"},
{file = "jiter-0.10.0-cp313-cp313t-win_amd64.whl", hash = "sha256:d7bfed2fe1fe0e4dda6ef682cee888ba444b21e7a6553e03252e4feb6cf0adca"},
{file = "jiter-0.10.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:5e9251a5e83fab8d87799d3e1a46cb4b7f2919b895c6f4483629ed2446f66522"},
{file = "jiter-0.10.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:023aa0204126fe5b87ccbcd75c8a0d0261b9abdbbf46d55e7ae9f8e22424eeb8"},
{file = "jiter-0.10.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c189c4f1779c05f75fc17c0c1267594ed918996a231593a21a5ca5438445216"},
{file = "jiter-0.10.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:15720084d90d1098ca0229352607cd68256c76991f6b374af96f36920eae13c4"},
{file = "jiter-0.10.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4f2fb68e5f1cfee30e2b2a09549a00683e0fde4c6a2ab88c94072fc33cb7426"},
{file = "jiter-0.10.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ce541693355fc6da424c08b7edf39a2895f58d6ea17d92cc2b168d20907dee12"},
{file = "jiter-0.10.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31c50c40272e189d50006ad5c73883caabb73d4e9748a688b216e85a9a9ca3b9"},
{file = "jiter-0.10.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fa3402a2ff9815960e0372a47b75c76979d74402448509ccd49a275fa983ef8a"},
{file = "jiter-0.10.0-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:1956f934dca32d7bb647ea21d06d93ca40868b505c228556d3373cbd255ce853"},
{file = "jiter-0.10.0-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:fcedb049bdfc555e261d6f65a6abe1d5ad68825b7202ccb9692636c70fcced86"},
{file = "jiter-0.10.0-cp314-cp314-win32.whl", hash = "sha256:ac509f7eccca54b2a29daeb516fb95b6f0bd0d0d8084efaf8ed5dfc7b9f0b357"},
{file = "jiter-0.10.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:5ed975b83a2b8639356151cef5c0d597c68376fc4922b45d0eb384ac058cfa00"},
{file = "jiter-0.10.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3aa96f2abba33dc77f79b4cf791840230375f9534e5fac927ccceb58c5e604a5"},
{file = "jiter-0.10.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:bd6292a43c0fc09ce7c154ec0fa646a536b877d1e8f2f96c19707f65355b5a4d"},
{file = "jiter-0.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:39de429dcaeb6808d75ffe9effefe96a4903c6a4b376b2f6d08d77c1aaee2f18"},
{file = "jiter-0.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52ce124f13a7a616fad3bb723f2bfb537d78239d1f7f219566dc52b6f2a9e48d"},
{file = "jiter-0.10.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:166f3606f11920f9a1746b2eea84fa2c0a5d50fd313c38bdea4edc072000b0af"},
{file = "jiter-0.10.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:28dcecbb4ba402916034fc14eba7709f250c4d24b0c43fc94d187ee0580af181"},
{file = "jiter-0.10.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86c5aa6910f9bebcc7bc4f8bc461aff68504388b43bfe5e5c0bd21efa33b52f4"},
{file = "jiter-0.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ceeb52d242b315d7f1f74b441b6a167f78cea801ad7c11c36da77ff2d42e8a28"},
{file = "jiter-0.10.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ff76d8887c8c8ee1e772274fcf8cc1071c2c58590d13e33bd12d02dc9a560397"},
{file = "jiter-0.10.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a9be4d0fa2b79f7222a88aa488bd89e2ae0a0a5b189462a12def6ece2faa45f1"},
{file = "jiter-0.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9ab7fd8738094139b6c1ab1822d6f2000ebe41515c537235fd45dabe13ec9324"},
{file = "jiter-0.10.0-cp39-cp39-win32.whl", hash = "sha256:5f51e048540dd27f204ff4a87f5d79294ea0aa3aa552aca34934588cf27023cf"},
{file = "jiter-0.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:1b28302349dc65703a9e4ead16f163b1c339efffbe1049c30a44b001a2a4fff9"},
{file = "jiter-0.10.0.tar.gz", hash = "sha256:07a7142c38aacc85194391108dc91b5b57093c978a9932bd86a36862759d9500"},
]
[[package]]
name = "jmespath"
version = "1.0.1"
@@ -3221,6 +3320,33 @@ rsa = ["cryptography (>=3.0.0)"]
signals = ["blinker (>=1.4.0)"]
signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"]
[[package]]
name = "openai"
version = "1.82.0"
description = "The official Python library for the openai API"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "openai-1.82.0-py3-none-any.whl", hash = "sha256:8c40647fea1816516cb3de5189775b30b5f4812777e40b8768f361f232b61b30"},
{file = "openai-1.82.0.tar.gz", hash = "sha256:b0a009b9a58662d598d07e91e4219ab4b1e3d8ba2db3f173896a92b9b874d1a7"},
]
[package.dependencies]
anyio = ">=3.5.0,<5"
distro = ">=1.7.0,<2"
httpx = ">=0.23.0,<1"
jiter = ">=0.4.0,<1"
pydantic = ">=1.9.0,<3"
sniffio = "*"
tqdm = ">4"
typing-extensions = ">=4.11,<5"
[package.extras]
datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"]
realtime = ["websockets (>=13,<16)"]
voice-helpers = ["numpy (>=2.0.2)", "sounddevice (>=0.5.1)"]
[[package]]
name = "opentelemetry-api"
version = "1.32.1"
@@ -4637,6 +4763,7 @@ files = [
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f66efbc1caa63c088dead1c4170d148eabc9b80d95fb75b6c92ac0aad2437d76"},
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:22353049ba4181685023b25b5b51a574bce33e7f51c759371a7422dcae5402a6"},
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:932205970b9f9991b34f55136be327501903f7c66830e9760a8ffb15b07f05cd"},
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a52d48f4e7bf9005e8f0a89209bf9a73f7190ddf0489eee5eb51377385f59f2a"},
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-win32.whl", hash = "sha256:3eac5a91891ceb88138c113f9db04f3cebdae277f5d44eaa3651a4f573e6a5da"},
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-win_amd64.whl", hash = "sha256:ab007f2f5a87bd08ab1499bdf96f3d5c6ad4dcfa364884cb4549aa0154b13a28"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:4a6679521a58256a90b0d89e03992c15144c5f3858f40d7c18886023d7943db6"},
@@ -4645,6 +4772,7 @@ files = [
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:811ea1594b8a0fb466172c384267a4e5e367298af6b228931f273b111f17ef52"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cf12567a7b565cbf65d438dec6cfbe2917d3c1bdddfce84a9930b7d35ea59642"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7dd5adc8b930b12c8fc5b99e2d535a09889941aa0d0bd06f4749e9a9397c71d2"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1492a6051dab8d912fc2adeef0e8c72216b24d57bd896ea607cb90bb0c4981d3"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-win32.whl", hash = "sha256:bd0a08f0bab19093c54e18a14a10b4322e1eacc5217056f3c063bd2f59853ce4"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-win_amd64.whl", hash = "sha256:a274fb2cb086c7a3dea4322ec27f4cb5cc4b6298adb583ab0e211a4682f241eb"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:20b0f8dc160ba83b6dcc0e256846e1a02d044e13f7ea74a3d1d56ede4e48c632"},
@@ -4653,6 +4781,7 @@ files = [
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:749c16fcc4a2b09f28843cda5a193e0283e47454b63ec4b81eaa2242f50e4ccd"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bf165fef1f223beae7333275156ab2022cffe255dcc51c27f066b4370da81e31"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:32621c177bbf782ca5a18ba4d7af0f1082a3f6e517ac2a18b3974d4edf349680"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b82a7c94a498853aa0b272fd5bc67f29008da798d4f93a2f9f289feb8426a58d"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-win32.whl", hash = "sha256:e8c4ebfcfd57177b572e2040777b8abc537cdef58a2120e830124946aa9b42c5"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-win_amd64.whl", hash = "sha256:0467c5965282c62203273b838ae77c0d29d7638c8a4e3a1c8bdd3602c10904e4"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:4c8c5d82f50bb53986a5e02d1b3092b03622c02c2eb78e29bec33fd9593bae1a"},
@@ -4661,6 +4790,7 @@ files = [
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96777d473c05ee3e5e3c3e999f5d23c6f4ec5b0c38c098b3a5229085f74236c6"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:3bc2a80e6420ca8b7d3590791e2dfc709c88ab9152c00eeb511c9875ce5778bf"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e188d2699864c11c36cdfdada94d781fd5d6b0071cd9c427bceb08ad3d7c70e1"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4f6f3eac23941b32afccc23081e1f50612bdbe4e982012ef4f5797986828cd01"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-win32.whl", hash = "sha256:6442cb36270b3afb1b4951f060eccca1ce49f3d087ca1ca4563a6eb479cb3de6"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-win_amd64.whl", hash = "sha256:e5b8daf27af0b90da7bb903a876477a9e6d7270be6146906b276605997c7e9a3"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:fc4b630cd3fa2cf7fce38afa91d7cfe844a9f75d7f0f36393fa98815e911d987"},
@@ -4669,6 +4799,7 @@ files = [
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2f1c3765db32be59d18ab3953f43ab62a761327aafc1594a2a1fbe038b8b8a7"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d85252669dc32f98ebcd5d36768f5d4faeaeaa2d655ac0473be490ecdae3c285"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e143ada795c341b56de9418c58d028989093ee611aa27ffb9b7f609c00d813ed"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2c59aa6170b990d8d2719323e628aaf36f3bfbc1c26279c0eeeb24d05d2d11c7"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-win32.whl", hash = "sha256:beffaed67936fbbeffd10966a4eb53c402fafd3d6833770516bf7314bc6ffa12"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-win_amd64.whl", hash = "sha256:040ae85536960525ea62868b642bdb0c2cc6021c9f9d507810c0c604e66f5a7b"},
{file = "ruamel.yaml.clib-0.2.12.tar.gz", hash = "sha256:6c8fbb13ec503f99a91901ab46e0b07ae7941cd527393187039aec586fdfd36f"},
@@ -5050,7 +5181,7 @@ version = "4.67.1"
description = "Fast, Extensible Progress Meter"
optional = false
python-versions = ">=3.7"
groups = ["dev"]
groups = ["main", "dev"]
files = [
{file = "tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2"},
{file = "tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2"},
@@ -5483,4 +5614,4 @@ type = ["pytest-mypy"]
[metadata]
lock-version = "2.1"
python-versions = ">=3.11,<3.13"
content-hash = "051924735a7069c8393fefc18fc2c310b196ea24ad41b8c984dc5852683d0407"
content-hash = "bd227969f24255e5b641c79c727abe98a4aab73b2689b3e6f8cfa6fae8fa5bed"


@@ -27,7 +27,8 @@ dependencies = [
"psycopg2-binary==2.9.9",
"pytest-celery[redis] (>=1.0.1,<2.0.0)",
"sentry-sdk[django] (>=2.20.0,<3.0.0)",
"uuid6==2024.7.10"
"uuid6==2024.7.10",
"openai (>=1.82.0,<2.0.0)"
]
description = "Prowler's API (Django/DRF)"
license = "Apache-2.0"
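
The `distro` and `jiter` packages added to the lock file above, the `tqdm` group change from `["dev"]` to `["main", "dev"]`, and the new `content-hash` are all knock-on effects of this `openai` dependency: its `[package.dependencies]` block in the lock diff lists `distro`, `jiter`, `tqdm`, and the rest.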


@@ -0,0 +1,82 @@
# Generated by Django 5.1.7 on 2025-04-10 14:54
import uuid
import django.core.validators
import django.db.models.deletion
from django.db import migrations, models
import api.rls
class Migration(migrations.Migration):
dependencies = [
("api", "0026_provider_secret_gcp_service_account"),
]
operations = [
migrations.CreateModel(
name="LighthouseConfig",
fields=[
(
"id",
models.UUIDField(
default=uuid.uuid4,
editable=False,
primary_key=True,
serialize=False,
),
),
("inserted_at", models.DateTimeField(auto_now_add=True)),
("updated_at", models.DateTimeField(auto_now=True)),
(
"name",
models.CharField(
max_length=100,
validators=[django.core.validators.MinLengthValidator(3)],
),
),
("api_key", models.BinaryField()),
("model", models.CharField(default="gpt-4o", max_length=50)),
("temperature", models.FloatField(default=0.7)),
("max_tokens", models.IntegerField(default=4000)),
(
"business_context",
models.TextField(
blank=True,
help_text="Additional business context for this AI model configuration",
null=True,
),
),
("is_active", models.BooleanField(default=True)),
(
"tenant",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
),
),
],
options={
"db_table": "lighthouse_configurations",
"abstract": False,
"indexes": [
models.Index(fields=["name"], name="lighthouse_config_name_idx"),
models.Index(
fields=["is_active"], name="lighthouse_config_active_idx"
),
],
"constraints": [
models.UniqueConstraint(
fields=("tenant_id",),
name="unique_lighthouse_config_per_tenant",
),
],
},
),
migrations.AddConstraint(
model_name="lighthouseconfig",
constraint=api.rls.RowLevelSecurityConstraint(
"tenant_id",
name="rls_on_lighthouseconfig",
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
),
),
]


@@ -1,8 +1,10 @@
import json
import logging
import re
from uuid import UUID, uuid4
from cryptography.fernet import Fernet
from config.custom_logging import BackendLogger
from cryptography.fernet import Fernet, InvalidToken
from django.conf import settings
from django.contrib.auth.models import AbstractBaseUser
from django.contrib.postgres.fields import ArrayField
@@ -49,6 +51,8 @@ fernet = Fernet(settings.SECRETS_ENCRYPTION_KEY.encode())
# Convert Prowler Severity enum to Django TextChoices
SeverityChoices = enum_to_choices(Severity)
logger = logging.getLogger(BackendLogger.API)
class StatusChoices(models.TextChoices):
"""
@@ -1332,3 +1336,129 @@ class ResourceScanSummary(RowLevelSecurityProtectedModel):
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
),
]
class LighthouseConfiguration(RowLevelSecurityProtectedModel):
"""
Stores configuration and API keys for LLM services.
"""
class ModelChoices(models.TextChoices):
GPT_4O_2024_11_20 = "gpt-4o-2024-11-20", _("GPT-4o v2024-11-20")
GPT_4O_2024_08_06 = "gpt-4o-2024-08-06", _("GPT-4o v2024-08-06")
GPT_4O_2024_05_13 = "gpt-4o-2024-05-13", _("GPT-4o v2024-05-13")
GPT_4O = "gpt-4o", _("GPT-4o Default")
GPT_4O_MINI_2024_07_18 = "gpt-4o-mini-2024-07-18", _("GPT-4o Mini v2024-07-18")
GPT_4O_MINI = "gpt-4o-mini", _("GPT-4o Mini Default")
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
updated_at = models.DateTimeField(auto_now=True, editable=False)
name = models.CharField(
max_length=100,
validators=[MinLengthValidator(3)],
blank=False,
null=False,
help_text="Name of the configuration",
)
api_key = models.BinaryField(
blank=False, null=False, help_text="Encrypted API key for the LLM service"
)
model = models.CharField(
max_length=50,
choices=ModelChoices.choices,
blank=False,
null=False,
help_text="Must be one of the supported model names",
)
temperature = models.FloatField(default=0, help_text="Must be between 0 and 1")
max_tokens = models.IntegerField(
default=4000, help_text="Must be between 500 and 5000"
)
business_context = models.TextField(
blank=True,
null=False,
default="",
help_text="Additional business context for this AI model configuration",
)
is_active = models.BooleanField(default=True)
def __str__(self):
return self.name
def clean(self):
super().clean()
# Validate temperature
if not 0 <= self.temperature <= 1:
raise ModelValidationError(
detail="Temperature must be between 0 and 1",
code="invalid_temperature",
pointer="/data/attributes/temperature",
)
# Validate max_tokens
if not 500 <= self.max_tokens <= 5000:
raise ModelValidationError(
detail="Max tokens must be between 500 and 5000",
code="invalid_max_tokens",
pointer="/data/attributes/max_tokens",
)
@property
def api_key_decoded(self):
"""Return the decrypted API key, or None if unavailable or invalid."""
if not self.api_key:
return None
try:
decrypted_key = fernet.decrypt(bytes(self.api_key))
return decrypted_key.decode()
except InvalidToken:
logger.warning("Invalid token while decrypting API key.")
except Exception as e:
logger.exception("Unexpected error while decrypting API key: %s", e)
@api_key_decoded.setter
def api_key_decoded(self, value):
"""Store the encrypted API key."""
if not value:
raise ModelValidationError(
detail="API key is required",
code="invalid_api_key",
pointer="/data/attributes/api_key",
)
# Validate OpenAI API key format
openai_key_pattern = r"^sk-[\w-]+T3BlbkFJ[\w-]+$"
if not re.match(openai_key_pattern, value):
raise ModelValidationError(
detail="Invalid OpenAI API key format.",
code="invalid_api_key",
pointer="/data/attributes/api_key",
)
self.api_key = fernet.encrypt(value.encode())
def save(self, *args, **kwargs):
self.full_clean()
super().save(*args, **kwargs)
class Meta(RowLevelSecurityProtectedModel.Meta):
db_table = "lighthouse_configurations"
constraints = [
RowLevelSecurityConstraint(
field="tenant_id",
name="rls_on_%(class)s",
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
),
# Enforce a single Lighthouse configuration per tenant (unique on tenant_id)
models.UniqueConstraint(
fields=["tenant_id"], name="unique_lighthouse_config_per_tenant"
),
]
class JSONAPIMeta:
resource_name = "lighthouse-configuration"
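
For reference, a minimal, self-contained sketch of the validate/encrypt/decrypt round trip that the `api_key_decoded` property and setter implement above. It assumes only the `cryptography` package; the Fernet key and the API-key value are made up and stand in for the project's `SECRETS_ENCRYPTION_KEY` and a real OpenAI key:

import re
from cryptography.fernet import Fernet, InvalidToken

OPENAI_KEY_PATTERN = r"^sk-[\w-]+T3BlbkFJ[\w-]+$"  # same pattern the setter enforces
fernet = Fernet(Fernet.generate_key())  # stand-in for settings.SECRETS_ENCRYPTION_KEY

def encrypt_api_key(value: str) -> bytes:
    # Setter behaviour: reject malformed keys, then store the Fernet ciphertext.
    if not re.match(OPENAI_KEY_PATTERN, value):
        raise ValueError("Invalid OpenAI API key format.")
    return fernet.encrypt(value.encode())

def decrypt_api_key(token: bytes) -> str | None:
    # Getter behaviour: return the plaintext, or None if the ciphertext is invalid.
    try:
        return fernet.decrypt(bytes(token)).decode()
    except InvalidToken:
        return None

ciphertext = encrypt_api_key("sk-test1234567890T3BlbkFJtest1234567890")
assert decrypt_api_key(ciphertext) == "sk-test1234567890T3BlbkFJtest1234567890"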


@@ -5458,3 +5458,337 @@ class TestIntegrationViewSet:
{f"filter[{filter_name}]": "whatever"},
)
assert response.status_code == status.HTTP_400_BAD_REQUEST
@pytest.mark.django_db
class TestLighthouseConfigViewSet:
@pytest.fixture
def valid_config_payload(self):
return {
"data": {
"type": "lighthouse-configuration",
"attributes": {
"name": "OpenAI",
"api_key": "sk-test1234567890T3BlbkFJtest1234567890",
"model": "gpt-4o",
"temperature": 0.7,
"max_tokens": 4000,
"business_context": "Test business context",
"is_active": True,
},
}
}
@pytest.fixture
def invalid_config_payload(self):
return {
"data": {
"type": "lighthouse-configuration",
"attributes": {
"name": "T", # Too short
"api_key": "invalid-key", # Invalid format
"model": "invalid-model",
"temperature": 2.0, # Invalid range
"max_tokens": -1, # Invalid value
},
}
}
def test_lighthouse_config_list(self, authenticated_client):
response = authenticated_client.get(reverse("lighthouseconfiguration-list"))
assert response.status_code == status.HTTP_200_OK
assert response.json()["data"] == []
def test_lighthouse_config_create(self, authenticated_client, valid_config_payload):
response = authenticated_client.post(
reverse("lighthouseconfiguration-list"),
data=valid_config_payload,
content_type=API_JSON_CONTENT_TYPE,
)
assert response.status_code == status.HTTP_201_CREATED
data = response.json()["data"]
assert (
data["attributes"]["name"]
== valid_config_payload["data"]["attributes"]["name"]
)
assert (
data["attributes"]["model"]
== valid_config_payload["data"]["attributes"]["model"]
)
assert (
data["attributes"]["temperature"]
== valid_config_payload["data"]["attributes"]["temperature"]
)
assert (
data["attributes"]["max_tokens"]
== valid_config_payload["data"]["attributes"]["max_tokens"]
)
assert (
data["attributes"]["business_context"]
== valid_config_payload["data"]["attributes"]["business_context"]
)
assert (
data["attributes"]["is_active"]
== valid_config_payload["data"]["attributes"]["is_active"]
)
# Check that API key is masked with asterisks only
masked_api_key = data["attributes"]["api_key"]
assert all(
c == "*" for c in masked_api_key
), "API key should contain only asterisks"
def test_lighthouse_config_create_invalid_name_too_short(
self, authenticated_client, valid_config_payload
):
"""Test that name validation fails when too short"""
payload = valid_config_payload.copy()
payload["data"]["attributes"]["name"] = "T" # Too short
response = authenticated_client.post(
reverse("lighthouseconfiguration-list"),
data=payload,
content_type=API_JSON_CONTENT_TYPE,
)
assert response.status_code == status.HTTP_400_BAD_REQUEST
errors = response.json()["errors"]
assert any("name" in error["source"]["pointer"] for error in errors)
def test_lighthouse_config_create_invalid_api_key_format(
self, authenticated_client, valid_config_payload
):
"""Test that API key validation fails with invalid format"""
payload = valid_config_payload.copy()
payload["data"]["attributes"]["api_key"] = "invalid-key"
response = authenticated_client.post(
reverse("lighthouseconfiguration-list"),
data=payload,
content_type=API_JSON_CONTENT_TYPE,
)
assert response.status_code == status.HTTP_400_BAD_REQUEST
errors = response.json()["errors"]
assert any(
"Invalid OpenAI API key format." in error["detail"] for error in errors
)
def test_lighthouse_config_create_invalid_model(
self, authenticated_client, valid_config_payload
):
"""Test that model validation fails with invalid model name"""
payload = valid_config_payload.copy()
payload["data"]["attributes"]["model"] = "invalid-model"
response = authenticated_client.post(
reverse("lighthouseconfiguration-list"),
data=payload,
content_type=API_JSON_CONTENT_TYPE,
)
assert response.status_code == status.HTTP_400_BAD_REQUEST
errors = response.json()["errors"]
assert any("model" in error["source"]["pointer"] for error in errors)
def test_lighthouse_config_create_invalid_temperature_range(
self, authenticated_client, valid_config_payload
):
"""Test that temperature validation fails when out of range"""
payload = valid_config_payload.copy()
payload["data"]["attributes"]["temperature"] = 2.0 # Out of range
response = authenticated_client.post(
reverse("lighthouseconfiguration-list"),
data=payload,
content_type=API_JSON_CONTENT_TYPE,
)
assert response.status_code == status.HTTP_400_BAD_REQUEST
errors = response.json()["errors"]
assert any("temperature" in error["source"]["pointer"] for error in errors)
def test_lighthouse_config_create_invalid_max_tokens(
self, authenticated_client, valid_config_payload
):
"""Test that max_tokens validation fails with invalid value"""
payload = valid_config_payload.copy()
payload["data"]["attributes"]["max_tokens"] = -1 # Invalid value
response = authenticated_client.post(
reverse("lighthouseconfiguration-list"),
data=payload,
content_type=API_JSON_CONTENT_TYPE,
)
assert response.status_code == status.HTTP_400_BAD_REQUEST
errors = response.json()["errors"]
assert any("max_tokens" in error["source"]["pointer"] for error in errors)
def test_lighthouse_config_create_missing_required_fields(
self, authenticated_client
):
"""Test that validation fails when required fields are missing"""
payload = {"data": {"type": "lighthouse-configuration", "attributes": {}}}
response = authenticated_client.post(
reverse("lighthouseconfiguration-list"),
data=payload,
content_type=API_JSON_CONTENT_TYPE,
)
assert response.status_code == status.HTTP_400_BAD_REQUEST
errors = response.json()["errors"]
# Check for required fields
required_fields = ["name", "api_key", "model"]
for field in required_fields:
assert any(field in error["source"]["pointer"] for error in errors)
def test_lighthouse_config_create_duplicate(
self, authenticated_client, valid_config_payload
):
# Create first config
response = authenticated_client.post(
reverse("lighthouseconfiguration-list"),
data=valid_config_payload,
content_type=API_JSON_CONTENT_TYPE,
)
assert response.status_code == status.HTTP_201_CREATED
# Try to create second config for same tenant
response = authenticated_client.post(
reverse("lighthouseconfiguration-list"),
data=valid_config_payload,
content_type=API_JSON_CONTENT_TYPE,
)
assert response.status_code == status.HTTP_400_BAD_REQUEST
assert (
"Lighthouse configuration already exists for this tenant"
in response.json()["errors"][0]["detail"]
)
def test_lighthouse_config_retrieve(
self, authenticated_client, lighthouse_config_fixture
):
"""Test retrieving a lighthouse config"""
response = authenticated_client.get(
reverse(
"lighthouseconfiguration-detail",
kwargs={"pk": lighthouse_config_fixture.id},
)
)
assert response.status_code == status.HTTP_200_OK
data = response.json()["data"]
assert data["attributes"]["name"] == lighthouse_config_fixture.name
assert data["attributes"]["api_key"] == "*" * len(
lighthouse_config_fixture.api_key
)
def test_lighthouse_config_update(
self, authenticated_client, lighthouse_config_fixture
):
update_payload = {
"data": {
"type": "lighthouse-configuration",
"id": str(lighthouse_config_fixture.id),
"attributes": {
"name": "Updated Config",
"model": "gpt-4o-mini",
"temperature": 0.5,
},
}
}
response = authenticated_client.patch(
reverse(
"lighthouseconfiguration-detail",
kwargs={"pk": lighthouse_config_fixture.id},
),
data=update_payload,
content_type=API_JSON_CONTENT_TYPE,
)
assert response.status_code == status.HTTP_200_OK
data = response.json()["data"]
assert data["attributes"]["name"] == "Updated Config"
assert data["attributes"]["model"] == "gpt-4o-mini"
assert data["attributes"]["temperature"] == 0.5
def test_lighthouse_config_delete(
self, authenticated_client, lighthouse_config_fixture
):
config_id = lighthouse_config_fixture.id
response = authenticated_client.delete(
reverse("lighthouseconfiguration-detail", kwargs={"pk": config_id})
)
assert response.status_code == status.HTTP_204_NO_CONTENT
# Verify deletion
response = authenticated_client.get(
reverse("lighthouseconfiguration-detail", kwargs={"pk": config_id})
)
assert response.status_code == status.HTTP_404_NOT_FOUND
@patch("api.v1.views.openai.OpenAI")
def test_lighthouse_config_check_connection(
self, mock_openai, authenticated_client, lighthouse_config_fixture
):
config_id = lighthouse_config_fixture.id
# Mock successful API call
mock_client = Mock()
mock_client.models.list.return_value = Mock(
data=[Mock(id="gpt-4o"), Mock(id="gpt-4o-mini")]
)
mock_openai.return_value = mock_client
# Check connection
response = authenticated_client.get(
reverse(
"lighthouseconfiguration-check-connection", kwargs={"pk": config_id}
)
)
assert response.status_code == status.HTTP_200_OK
assert response.json()["data"]["detail"] == "Connection successful!"
assert "gpt-4o" in response.json()["data"]["available_models"]
assert "gpt-4o-mini" in response.json()["data"]["available_models"]
def test_lighthouse_config_get_key(
self, authenticated_client, lighthouse_config_fixture, valid_config_payload
):
config_id = lighthouse_config_fixture.id
expected_api_key = valid_config_payload["data"]["attributes"]["api_key"]
response = authenticated_client.get(
reverse("lighthouseconfiguration-detail", kwargs={"pk": config_id})
+ "?fields[lighthouse-config]=api_key"
)
assert response.status_code == status.HTTP_200_OK
assert response.json()["data"]["attributes"]["api_key"] == expected_api_key
def test_lighthouse_config_filters(
self, authenticated_client, lighthouse_config_fixture
):
# Test name filter
response = authenticated_client.get(
reverse("lighthouseconfiguration-list")
+ "?filter[name]="
+ lighthouse_config_fixture.name
)
assert response.status_code == status.HTTP_200_OK
assert len(response.json()["data"]) == 1
# Test model filter
response = authenticated_client.get(
reverse("lighthouseconfiguration-list") + "?filter[model]=gpt-4o"
)
assert response.status_code == status.HTTP_200_OK
assert len(response.json()["data"]) == 1
# Test is_active filter
response = authenticated_client.get(
reverse("lighthouseconfiguration-list") + "?filter[is_active]=true"
)
assert response.status_code == status.HTTP_200_OK
assert len(response.json()["data"]) == 1
def test_lighthouse_config_sorting(
self, authenticated_client, lighthouse_config_fixture
):
# Test sorting by name
response = authenticated_client.get(
reverse("lighthouseconfiguration-list") + "?sort=name"
)
assert response.status_code == status.HTTP_200_OK
assert len(response.json()["data"]) == 1
# Test sorting by inserted_at
response = authenticated_client.get(
reverse("lighthouseconfiguration-list") + "?sort=-inserted_at"
)
assert response.status_code == status.HTTP_200_OK
assert len(response.json()["data"]) == 1


@@ -1,4 +1,5 @@
import json
import re
from datetime import datetime, timedelta, timezone
from django.conf import settings
@@ -20,6 +21,7 @@ from api.models import (
IntegrationProviderRelationship,
Invitation,
InvitationRoleRelationship,
LighthouseConfiguration,
Membership,
Provider,
ProviderGroup,
@@ -2128,3 +2130,137 @@ class IntegrationUpdateSerializer(BaseWriteIntegrationSerializer):
IntegrationProviderRelationship.objects.bulk_create(new_relationships)
return super().update(instance, validated_data)
class LighthouseConfigSerializer(RLSSerializer):
"""
Serializer for the LighthouseConfiguration model.
"""
api_key = serializers.CharField(required=False)
class Meta:
model = LighthouseConfiguration
fields = [
"id",
"name",
"api_key",
"model",
"temperature",
"max_tokens",
"business_context",
"is_active",
"inserted_at",
"updated_at",
"url",
]
extra_kwargs = {
"id": {"read_only": True},
"inserted_at": {"read_only": True},
"updated_at": {"read_only": True},
}
def to_representation(self, instance):
data = super().to_representation(instance)
# Check if api_key is specifically requested in fields param
fields_param = self.context.get("request", None) and self.context[
"request"
].query_params.get("fields[lighthouse-config]", "")
if fields_param == "api_key":
# Return decrypted key if specifically requested
data["api_key"] = instance.api_key_decoded if instance.api_key else None
else:
# Return masked key for general requests
data["api_key"] = "*" * len(instance.api_key) if instance.api_key else None
return data
class LighthouseConfigCreateSerializer(RLSSerializer, BaseWriteSerializer):
"""Serializer for creating new Lighthouse configurations."""
api_key = serializers.CharField(write_only=True, required=True)
class Meta:
model = LighthouseConfiguration
fields = [
"id",
"name",
"api_key",
"model",
"temperature",
"max_tokens",
"business_context",
"is_active",
"inserted_at",
"updated_at",
]
extra_kwargs = {
"id": {"read_only": True},
"inserted_at": {"read_only": True},
"updated_at": {"read_only": True},
}
def validate(self, attrs):
tenant_id = self.context.get("request").tenant_id
if LighthouseConfiguration.objects.filter(tenant_id=tenant_id).exists():
raise serializers.ValidationError(
{
"tenant_id": "Lighthouse configuration already exists for this tenant."
}
)
return super().validate(attrs)
def create(self, validated_data):
api_key = validated_data.pop("api_key")
# Validate API key format before creating the instance
if not api_key:
raise serializers.ValidationError({"api_key": "API key is required"})
# Validate OpenAI API key format
openai_key_pattern = r"^sk-[\w-]+T3BlbkFJ[\w-]+$"
if not re.match(openai_key_pattern, api_key):
raise serializers.ValidationError(
{"api_key": "Invalid OpenAI API key format."}
)
instance = super().create(validated_data)
instance.api_key_decoded = api_key
instance.save()
return instance
class LighthouseConfigUpdateSerializer(BaseWriteSerializer):
"""
Serializer for updating LighthouseConfiguration instances.
"""
api_key = serializers.CharField(write_only=True, required=False)
class Meta:
model = LighthouseConfiguration
fields = [
"id",
"name",
"api_key",
"model",
"temperature",
"max_tokens",
"business_context",
"is_active",
]
extra_kwargs = {
"id": {"read_only": True},
"name": {"required": False},
"model": {"required": False},
"temperature": {"required": False},
"max_tokens": {"required": False},
}
def update(self, instance, validated_data):
api_key = validated_data.pop("api_key", None)
instance = super().update(instance, validated_data)
if api_key:
instance.api_key_decoded = api_key
instance.save()
return instance
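
Note that both the create and update paths hand the plaintext key to the model's `api_key_decoded` setter, so the OpenAI key-format check and Fernet encryption shown in the model apply on update as well as on create.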


@@ -13,6 +13,7 @@ from api.v1.views import (
IntegrationViewSet,
InvitationAcceptViewSet,
InvitationViewSet,
LighthouseConfigViewSet,
MembershipViewSet,
OverviewViewSet,
ProviderGroupProvidersRelationshipView,
@@ -49,6 +50,11 @@ router.register(
router.register(r"overviews", OverviewViewSet, basename="overview")
router.register(r"schedules", ScheduleViewSet, basename="schedule")
router.register(r"integrations", IntegrationViewSet, basename="integration")
router.register(
r"lighthouse-configuration",
LighthouseConfigViewSet,
basename="lighthouseconfiguration",
)
tenants_router = routers.NestedSimpleRouter(router, r"tenants", lookup="tenant")
tenants_router.register(
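
With `basename="lighthouseconfiguration"`, this registration produces the reverse names the new tests rely on: `lighthouseconfiguration-list`, `lighthouseconfiguration-detail`, and, via the viewset's `check_connection` action, `lighthouseconfiguration-check-connection`.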


@@ -2,6 +2,7 @@ import glob
import os
from datetime import datetime, timedelta, timezone
import openai
import sentry_sdk
from allauth.socialaccount.providers.github.views import GitHubOAuth2Adapter
from allauth.socialaccount.providers.google.views import GoogleOAuth2Adapter
@@ -84,6 +85,7 @@ from api.models import (
Finding,
Integration,
Invitation,
LighthouseConfiguration,
Membership,
Provider,
ProviderGroup,
@@ -126,6 +128,9 @@ from api.v1.serializers import (
InvitationCreateSerializer,
InvitationSerializer,
InvitationUpdateSerializer,
LighthouseConfigCreateSerializer,
LighthouseConfigSerializer,
LighthouseConfigUpdateSerializer,
MembershipSerializer,
OverviewFindingSerializer,
OverviewProviderSerializer,
@@ -2900,3 +2905,105 @@ class IntegrationViewSet(BaseRLSViewSet):
context = super().get_serializer_context()
context["allowed_providers"] = self.allowed_providers
return context
@extend_schema_view(
list=extend_schema(
tags=["Lighthouse"],
summary="List all Lighthouse configurations",
description="Retrieve a list of all Lighthouse configurations.",
),
create=extend_schema(
tags=["Lighthouse"],
summary="Create a new Lighthouse configuration",
description="Create a new Lighthouse configuration with the specified details.",
),
partial_update=extend_schema(
tags=["Lighthouse"],
summary="Partially update a Lighthouse configuration",
description="Update certain fields of an existing Lighthouse configuration.",
),
retrieve=extend_schema(
tags=["Lighthouse"],
summary="Retrieve a Lighthouse configuration",
description="Fetch detailed information about a specific Lighthouse configuration by its ID. Add query param `fields[lighthouse-config]=api_key` to get API key.",
),
destroy=extend_schema(
tags=["Lighthouse"],
summary="Delete a Lighthouse configuration",
description="Remove a Lighthouse configuration by its ID.",
),
check_connection=extend_schema(
tags=["Lighthouse"],
summary="Check the connection to the OpenAI API",
description="Verify the connection to the OpenAI API for a specific Lighthouse configuration.",
),
)
class LighthouseConfigViewSet(BaseRLSViewSet):
"""
API endpoint for managing Lighthouse configuration.
"""
filterset_fields = {
"name": ["exact", "icontains"],
"model": ["exact", "icontains"],
"is_active": ["exact"],
"inserted_at": ["gte", "lte"],
}
ordering_fields = ["name", "inserted_at", "updated_at", "is_active"]
ordering = ["-inserted_at"]
def get_queryset(self):
return LighthouseConfiguration.objects.filter(tenant_id=self.request.tenant_id)
def get_serializer_class(self):
if self.action == "create":
return LighthouseConfigCreateSerializer
elif self.action == "partial_update":
return LighthouseConfigUpdateSerializer
return LighthouseConfigSerializer
@action(detail=True, methods=["get"])
def check_connection(self, request, pk=None):
"""
Check the connection to the OpenAI API.
"""
instance = self.get_object()
if not instance.api_key_decoded:
return Response(
{"detail": "API key is invalid or missing."},
status=status.HTTP_400_BAD_REQUEST,
)
try:
client = openai.OpenAI(
api_key=instance.api_key_decoded,
)
models = client.models.list()
return Response(
{
"detail": "Connection successful!",
"available_models": [model.id for model in models.data],
},
status=status.HTTP_200_OK,
)
except Exception as e:
return Response(
{"detail": f"Connection failed: {str(e)}"},
status=status.HTTP_400_BAD_REQUEST,
)
def create(self, request, *args, **kwargs):
"""Create new Lighthouse configuration"""
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
instance = serializer.save()
headers = self.get_success_headers(serializer.data)
return Response(
LighthouseConfigSerializer(
instance, context=self.get_serializer_context()
).data,
status=status.HTTP_201_CREATED,
headers=headers,
)
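
End to end, the new endpoints behave as in this short sketch, written in the style of the test suite above (`authenticated_client`, `valid_config_payload`, `API_JSON_CONTENT_TYPE` and `reverse` are the fixtures and imports the test module already uses; nothing below is new API surface):

# Create the single per-tenant configuration; the key comes back masked as "*...".
response = authenticated_client.post(
    reverse("lighthouseconfiguration-list"),
    data=valid_config_payload,
    content_type=API_JSON_CONTENT_TYPE,
)
config_id = response.json()["data"]["id"]

# The stored key is only revealed when explicitly requested via sparse fieldsets.
authenticated_client.get(
    reverse("lighthouseconfiguration-detail", kwargs={"pk": config_id})
    + "?fields[lighthouse-config]=api_key"
)

# check_connection decrypts the key and calls OpenAI's models.list() with it.
authenticated_client.get(
    reverse("lighthouseconfiguration-check-connection", kwargs={"pk": config_id})
)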


@@ -19,6 +19,7 @@ from api.models import (
Integration,
IntegrationProviderRelationship,
Invitation,
LighthouseConfiguration,
Membership,
Provider,
ProviderGroup,
@@ -929,6 +930,20 @@ def backfill_scan_metadata_fixture(scans_fixture, findings_fixture):
backfill_resource_scan_summaries(tenant_id=tenant_id, scan_id=scan_id)
@pytest.fixture
def lighthouse_config_fixture(authenticated_client, tenants_fixture):
return LighthouseConfiguration.objects.create(
tenant_id=tenants_fixture[0].id,
name="OpenAI",
api_key_decoded="sk-test1234567890T3BlbkFJtest1234567890",
model="gpt-4o",
temperature=0,
max_tokens=4000,
business_context="Test business context",
is_active=True,
)
@pytest.fixture(scope="function")
def latest_scan_finding(authenticated_client, providers_fixture, resources_fixture):
provider = providers_fixture[0]

poetry.lock generated

@@ -4636,6 +4636,7 @@ files = [
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f66efbc1caa63c088dead1c4170d148eabc9b80d95fb75b6c92ac0aad2437d76"},
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:22353049ba4181685023b25b5b51a574bce33e7f51c759371a7422dcae5402a6"},
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:932205970b9f9991b34f55136be327501903f7c66830e9760a8ffb15b07f05cd"},
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a52d48f4e7bf9005e8f0a89209bf9a73f7190ddf0489eee5eb51377385f59f2a"},
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-win32.whl", hash = "sha256:3eac5a91891ceb88138c113f9db04f3cebdae277f5d44eaa3651a4f573e6a5da"},
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-win_amd64.whl", hash = "sha256:ab007f2f5a87bd08ab1499bdf96f3d5c6ad4dcfa364884cb4549aa0154b13a28"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:4a6679521a58256a90b0d89e03992c15144c5f3858f40d7c18886023d7943db6"},
@@ -4644,6 +4645,7 @@ files = [
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:811ea1594b8a0fb466172c384267a4e5e367298af6b228931f273b111f17ef52"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cf12567a7b565cbf65d438dec6cfbe2917d3c1bdddfce84a9930b7d35ea59642"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7dd5adc8b930b12c8fc5b99e2d535a09889941aa0d0bd06f4749e9a9397c71d2"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1492a6051dab8d912fc2adeef0e8c72216b24d57bd896ea607cb90bb0c4981d3"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-win32.whl", hash = "sha256:bd0a08f0bab19093c54e18a14a10b4322e1eacc5217056f3c063bd2f59853ce4"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-win_amd64.whl", hash = "sha256:a274fb2cb086c7a3dea4322ec27f4cb5cc4b6298adb583ab0e211a4682f241eb"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:20b0f8dc160ba83b6dcc0e256846e1a02d044e13f7ea74a3d1d56ede4e48c632"},
@@ -4652,6 +4654,7 @@ files = [
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:749c16fcc4a2b09f28843cda5a193e0283e47454b63ec4b81eaa2242f50e4ccd"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bf165fef1f223beae7333275156ab2022cffe255dcc51c27f066b4370da81e31"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:32621c177bbf782ca5a18ba4d7af0f1082a3f6e517ac2a18b3974d4edf349680"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b82a7c94a498853aa0b272fd5bc67f29008da798d4f93a2f9f289feb8426a58d"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-win32.whl", hash = "sha256:e8c4ebfcfd57177b572e2040777b8abc537cdef58a2120e830124946aa9b42c5"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-win_amd64.whl", hash = "sha256:0467c5965282c62203273b838ae77c0d29d7638c8a4e3a1c8bdd3602c10904e4"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:4c8c5d82f50bb53986a5e02d1b3092b03622c02c2eb78e29bec33fd9593bae1a"},
@@ -4660,6 +4663,7 @@ files = [
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96777d473c05ee3e5e3c3e999f5d23c6f4ec5b0c38c098b3a5229085f74236c6"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:3bc2a80e6420ca8b7d3590791e2dfc709c88ab9152c00eeb511c9875ce5778bf"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e188d2699864c11c36cdfdada94d781fd5d6b0071cd9c427bceb08ad3d7c70e1"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4f6f3eac23941b32afccc23081e1f50612bdbe4e982012ef4f5797986828cd01"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-win32.whl", hash = "sha256:6442cb36270b3afb1b4951f060eccca1ce49f3d087ca1ca4563a6eb479cb3de6"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-win_amd64.whl", hash = "sha256:e5b8daf27af0b90da7bb903a876477a9e6d7270be6146906b276605997c7e9a3"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:fc4b630cd3fa2cf7fce38afa91d7cfe844a9f75d7f0f36393fa98815e911d987"},
@@ -4668,6 +4672,7 @@ files = [
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2f1c3765db32be59d18ab3953f43ab62a761327aafc1594a2a1fbe038b8b8a7"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d85252669dc32f98ebcd5d36768f5d4faeaeaa2d655ac0473be490ecdae3c285"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e143ada795c341b56de9418c58d028989093ee611aa27ffb9b7f609c00d813ed"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2c59aa6170b990d8d2719323e628aaf36f3bfbc1c26279c0eeeb24d05d2d11c7"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-win32.whl", hash = "sha256:beffaed67936fbbeffd10966a4eb53c402fafd3d6833770516bf7314bc6ffa12"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-win_amd64.whl", hash = "sha256:040ae85536960525ea62868b642bdb0c2cc6021c9f9d507810c0c604e66f5a7b"},
{file = "ruamel.yaml.clib-0.2.12.tar.gz", hash = "sha256:6c8fbb13ec503f99a91901ab46e0b07ae7941cd527393187039aec586fdfd36f"},