Added precommit badge in readme (#201)
* Added precommit badge in readme
* [pre-commit.ci] auto fixes from pre-commit.com hooks; for more information, see https://pre-commit.ci
* Added sonar cloud badges
* CLA badge added
* Commented out `language: system` in pre-commit config
* Update .pre-commit-config.yaml
* [pre-commit.ci] auto fixes from pre-commit.com hooks; for more information, see https://pre-commit.ci
* Update .pre-commit-config.yaml
* Commented out failing checks

---------

Signed-off-by: Ritwik G <100672805+ritwik-g@users.noreply.github.com>
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
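The key functional change is commenting out the hooks that fail on pre-commit.ci, including the `language: system` overrides. As a rough illustration (not part of the diff itself): without `language: system`, pre-commit builds an isolated environment for each hook instead of expecting the tool to be preinstalled on the host, which is what breaks on pre-commit.ci runners. A minimal sketch of the pattern, using the black hook from the config below:

    # Sketch only; see the .pre-commit-config.yaml hunks in this commit
    # for the exact lines that were changed.
    repos:
      - repo: https://github.com/psf/black
        rev: 24.3.0
        hooks:
          - id: black
            args: [--config=pyproject.toml, -l 80]
            # language: system  # commented out so pre-commit.ci manages the env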
.github/workflows/production-build.yaml (vendored): 2 lines changed
@@ -11,7 +11,7 @@ jobs:
build-and-push:
runs-on: ubuntu-latest
strategy:
matrix:
matrix:
service_name: [backend, frontend, document-service, platform-service, prompt-service, worker, x2text-service]

steps:

.pre-commit-config.yaml

@@ -40,33 +40,33 @@ repos:
hooks:
- id: yamllint
args: ["-d", "relaxed"]
language: system
- repo: https://github.com/rhysd/actionlint
rev: v1.6.27
hooks:
- id: actionlint-docker
args: [-ignore, 'label ".+" is unknown']
# language: system
# - repo: https://github.com/rhysd/actionlint
# rev: v1.6.27
# hooks:
# - id: actionlint-docker
# args: [-ignore, 'label ".+" is unknown']
- repo: https://github.com/psf/black
rev: 24.3.0
hooks:
- id: black
args: [--config=pyproject.toml, -l 80]
language: system
# language: system
exclude: |
(?x)^(
unstract/flags/src/unstract/flags/evaluation_.*\.py|
)$
- repo: https://github.com/pycqa/flake8
rev: 7.0.0
hooks:
- id: flake8
args: [--max-line-length=80]
exclude: |
(?x)^(
.*migrations/.*\.py|
core/tests/.*|
unstract/flags/src/unstract/flags/evaluation_.*\.py|
)$
# - repo: https://github.com/pycqa/flake8
# rev: 7.0.0
# hooks:
# - id: flake8
# args: [--max-line-length=80]
# exclude: |
# (?x)^(
# .*migrations/.*\.py|
# core/tests/.*|
# unstract/flags/src/unstract/flags/evaluation_.*\.py|
# )$
- repo: https://github.com/pycqa/isort
rev: 5.13.2
hooks:
@@ -104,35 +104,35 @@ repos:
rev: v8.18.2
hooks:
- id: gitleaks
- repo: https://github.com/hadolint/hadolint
rev: v2.12.1-beta
hooks:
- id: hadolint-docker
args:
- --ignore=DL3003
- --ignore=DL3008
- --ignore=DL3013
- --ignore=DL3018
- --ignore=SC1091
files: Dockerfile$
# - repo: https://github.com/hadolint/hadolint
# rev: v2.12.1-beta
# hooks:
# - id: hadolint-docker
# args:
# - --ignore=DL3003
# - --ignore=DL3008
# - --ignore=DL3013
# - --ignore=DL3018
# - --ignore=SC1091
# files: Dockerfile$
- repo: https://github.com/asottile/yesqa
rev: v1.5.0
hooks:
- id: yesqa
- repo: https://github.com/pre-commit/mirrors-eslint
rev: "v9.0.0-beta.2" # Use the sha / tag you want to point at
hooks:
- id: eslint
args: [--config=frontend/.eslintrc.json]
files: \.[jt]sx?$ # *.js, *.jsx, *.ts and *.tsx
types: [file]
additional_dependencies:
- eslint@8.41.0
- eslint-config-google@0.14.0
- eslint-config-prettier@8.8.0
- eslint-plugin-prettier@4.2.1
- eslint-plugin-react@7.32.2
- eslint-plugin-import@2.25.2
# - repo: https://github.com/pre-commit/mirrors-eslint
# rev: "v9.0.0-beta.2" # Use the sha / tag you want to point at
# hooks:
# - id: eslint
# args: [--config=frontend/.eslintrc.json]
# files: \.[jt]sx?$ # *.js, *.jsx, *.ts and *.tsx
# types: [file]
# additional_dependencies:
# - eslint@8.41.0
# - eslint-config-google@0.14.0
# - eslint-config-prettier@8.8.0
# - eslint-plugin-prettier@4.2.1
# - eslint-plugin-react@7.32.2
# - eslint-plugin-import@2.25.2
- repo: https://github.com/Lucas-C/pre-commit-hooks-nodejs
rev: v1.1.2
hooks:
@@ -155,16 +155,16 @@ repos:
rev: 2.12.4
hooks:
- id: pdm-lock-check
- repo: local
hooks:
- id: run-mypy
name: Run mypy
entry: sh -c 'pdm run mypy .'
language: system
pass_filenames: false
- id: check-django-migrations
name: Check django migrations
entry: sh -c 'pdm run docker/scripts/check_django_migrations.sh'
language: system
types: [python] # hook only runs if a python file is staged
pass_filenames: false
# - repo: local
# hooks:
# - id: run-mypy
# name: Run mypy
# entry: sh -c 'pdm run mypy .'
# language: system
# pass_filenames: false
# - id: check-django-migrations
# name: Check django migrations
# entry: sh -c 'pdm run docker/scripts/check_django_migrations.sh'
# language: system
# types: [python] # hook only runs if a python file is staged
# pass_filenames: false

README.md: 10 lines changed
@@ -5,6 +5,14 @@

## No-code LLM Platform to launch APIs and ETL Pipelines to structure unstructured documents

[](https://cla-assistant.io/Zipstack/unstract)
[](https://results.pre-commit.ci/latest/github/Zipstack/unstract/main)
[](https://sonarcloud.io/summary/new_code?id=Zipstack_unstract)
[](https://sonarcloud.io/summary/new_code?id=Zipstack_unstract)
[](https://sonarcloud.io/summary/new_code?id=Zipstack_unstract)
[](https://sonarcloud.io/summary/new_code?id=Zipstack_unstract)
[](https://sonarcloud.io/summary/new_code?id=Zipstack_unstract)

</div>

## 🤖 Go beyond co-pilots

@@ -121,4 +129,4 @@ Contributions are welcome! Please read [CONTRIBUTE.md](CONTRIBUTE.md) for furthe

## 👋 Join the LLM-powered automation community

[Join great conversations](https://join-slack.unstract.com) around LLMs, their ecosystem and leveraging them to automate the previously unautomatable!
[Join great conversations](https://join-slack.unstract.com) around LLMs, their ecosystem and leveraging them to automate the previously unautomatable!

@@ -2,4 +2,4 @@ from django.contrib import admin

from .models import Organization, User

admin.site.register([Organization, User])
admin.site.register([Organization, User])

@@ -121,10 +121,10 @@ class AuthenticationController:
return redirect(f"{settings.ERROR_URL}")

if member.organization_id and member.role and len(member.role) > 0:
organization: Optional[
Organization
] = OrganizationService.get_organization_by_org_id(
member.organization_id
organization: Optional[Organization] = (
OrganizationService.get_organization_by_org_id(
member.organization_id
)
)
if organization:
try:
@@ -192,9 +192,9 @@ class AuthenticationController:
new_organization = False
organization_ids = CacheService.get_user_organizations(user.user_id)
if not organization_ids:
z_organizations: list[
OrganizationData
] = self.auth_service.get_organizations_by_user_id(user.user_id)
z_organizations: list[OrganizationData] = (
self.auth_service.get_organizations_by_user_id(user.user_id)
)
organization_ids = {org.id for org in z_organizations}
if organization_id and organization_id in organization_ids:
organization = OrganizationService.get_organization_by_org_id(
@@ -242,9 +242,9 @@ class AuthenticationController:
},
)
# Update user session data in redis
user_session_info: dict[
str, Any
] = CacheService.get_user_session_info(user.email)
user_session_info: dict[str, Any] = (
CacheService.get_user_session_info(user.email)
)
user_session_info["current_org"] = organization_id
CacheService.set_user_session_info(user_session_info)
response.set_cookie(Cookie.ORG_ID, organization_id)

@@ -29,7 +29,10 @@ class Migration(migrations.Migration):
verbose_name="ID",
),
),
("password", models.CharField(max_length=128, verbose_name="password")),
(
"password",
models.CharField(max_length=128, verbose_name="password"),
),
(
"last_login",
models.DateTimeField(
@@ -96,7 +99,8 @@ class Migration(migrations.Migration):
(
"date_joined",
models.DateTimeField(
default=django.utils.timezone.now, verbose_name="date joined"
default=django.utils.timezone.now,
verbose_name="date joined",
),
),
("user_id", models.CharField()),
@@ -218,9 +222,14 @@ class Migration(migrations.Migration):
),
(
"domain",
models.CharField(db_index=True, max_length=253, unique=True),
models.CharField(
db_index=True, max_length=253, unique=True
),
),
(
"is_primary",
models.BooleanField(db_index=True, default=True),
),
("is_primary", models.BooleanField(db_index=True, default=True)),
(
"tenant",
models.ForeignKey(

@@ -1,9 +1,10 @@
# Generated by Django 4.2.1 on 2023-11-02 05:22

import uuid

import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import uuid


class Migration(migrations.Migration):
@@ -27,7 +28,9 @@ class Migration(migrations.Migration):
("key", models.UUIDField(default=uuid.uuid4)),
(
"key_name",
models.CharField(blank=True, max_length=64, null=True, unique=True),
models.CharField(
blank=True, max_length=64, null=True, unique=True
),
),
("is_active", models.BooleanField(default=False)),
(

@@ -8,6 +8,8 @@ from django.db import IntegrityError
Logger = logging.getLogger(__name__)

subscription_loader = load_plugins()


class OrganizationService:
def __init__(self): # type: ignore
pass
@@ -36,11 +38,12 @@ class OrganizationService:
cls = subscription_plugin[SubscriptionConfig.METADATA][
SubscriptionConfig.METADATA_SERVICE_CLASS
]
cls.add(
organization_id=organization_id)
cls.add(organization_id=organization_id)

except IntegrityError as error:
Logger.info(f"[Duplicate Id] Failed to create Organization Error: {error}")
Logger.info(
f"[Duplicate Id] Failed to create Organization Error: {error}"
)
raise error
# Add one or more domains for the tenant
domain = Domain()

@@ -24,13 +24,17 @@ def load_plugins() -> list[Any]:
"""Iterate through the subscription plugins and register them."""
plugins_app = apps.get_app_config(SubscriptionConfig.PLUGINS_APP)
package_path = plugins_app.module.__package__
subscription_dir = os.path.join(plugins_app.path, SubscriptionConfig.PLUGIN_DIR)
subscription_package_path = f"{package_path}.{SubscriptionConfig.PLUGIN_DIR}"
subscription_dir = os.path.join(
plugins_app.path, SubscriptionConfig.PLUGIN_DIR
)
subscription_package_path = (
f"{package_path}.{SubscriptionConfig.PLUGIN_DIR}"
)
subscription_plugins: list[Any] = []

if not os.path.exists(subscription_dir):
return subscription_plugins


for item in os.listdir(subscription_dir):
# Loads a plugin if it is in a directory.
if os.path.isdir(os.path.join(subscription_dir, item)):
@@ -76,4 +80,4 @@ def load_plugins() -> list[Any]:
if len(subscription_plugins) == 0:
logger.info("No subscription plugins found.")

return subscription_plugins
return subscription_plugins

@@ -15,6 +15,10 @@ urlpatterns = [
path("logout", logout, name="logout"),
path("callback", callback, name="callback"),
path("organization", get_organizations, name="get_organizations"),
path("organization/<str:id>/set", set_organization, name="set_organization"),
path("organization/create", create_organization, name="create_organization"),
path(
"organization/<str:id>/set", set_organization, name="set_organization"
),
path(
"organization/create", create_organization, name="create_organization"
),
]

@@ -99,18 +99,18 @@ class AdapterProcessor:
adapter_metadata.pop(AdapterKeys.ADAPTER_TYPE)
== AdapterKeys.X2TEXT
):
adapter_metadata[
X2TextConstants.X2TEXT_HOST
] = settings.X2TEXT_HOST
adapter_metadata[
X2TextConstants.X2TEXT_PORT
] = settings.X2TEXT_PORT
adapter_metadata[X2TextConstants.X2TEXT_HOST] = (
settings.X2TEXT_HOST
)
adapter_metadata[X2TextConstants.X2TEXT_PORT] = (
settings.X2TEXT_PORT
)
platform_key = (
PlatformAuthenticationService.get_active_platform_key()
)
adapter_metadata[
X2TextConstants.PLATFORM_SERVICE_API_KEY
] = str(platform_key.key)
adapter_metadata[X2TextConstants.PLATFORM_SERVICE_API_KEY] = (
str(platform_key.key)
)

adapter_instance = adapter_class(adapter_metadata)
test_result: bool = adapter_instance.test_connection()

@@ -112,9 +112,9 @@ class AdapterViewSet(GenericViewSet):
adapter_metadata = serializer.validated_data.get(
AdapterKeys.ADAPTER_METADATA
)
adapter_metadata[
AdapterKeys.ADAPTER_TYPE
] = serializer.validated_data.get(AdapterKeys.ADAPTER_TYPE)
adapter_metadata[AdapterKeys.ADAPTER_TYPE] = (
serializer.validated_data.get(AdapterKeys.ADAPTER_TYPE)
)
try:
test_result = AdapterProcessor.test_adapter(
adapter_id=adapter_id, adapter_metadata=adapter_metadata

@@ -3,7 +3,6 @@ from typing import Any, Union

from api.constants import ApiExecution
from api.models import APIDeployment, APIKey
from backend.serializers import AuditSerializer
from django.core.validators import RegexValidator
from rest_framework.serializers import (
CharField,
@@ -14,6 +13,8 @@ from rest_framework.serializers import (
ValidationError,
)

from backend.serializers import AuditSerializer


class APIDeploymentSerializer(AuditSerializer):
class Meta:

@@ -1,4 +1,2 @@
class AppConstants:
"""Constants for Apps."""


@@ -3,4 +3,4 @@ from rest_framework.exceptions import APIException

class FetchAppListFailed(APIException):
status_code = 400
default_detail = "Failed to fetch App list."
default_detail = "Failed to fetch App list."

@@ -1,5 +1,5 @@
from django.urls import path
from apps import views
from django.urls import path
from rest_framework.urlpatterns import format_suffix_patterns

urlpatterns = format_suffix_patterns(

@@ -1,5 +1,4 @@
"""This module contains the Celery configuration for the backend
project."""
"""This module contains the Celery configuration for the backend project."""

import os


@@ -17,9 +17,8 @@ Including another URLconf
from account.admin import admin
from django.conf import settings
from django.conf.urls import * # noqa: F401, F403
from django.urls import include, path
from django.conf.urls.static import static
from django.conf import settings
from django.urls import include, path

path_prefix = settings.PATH_PREFIX
api_path_prefix = settings.API_DEPLOYMENT_PATH_PREFIX
@@ -38,4 +37,4 @@ urlpatterns = [
# Feature flags
path(f"{path_prefix}/flags/", include("feature_flag.urls")),
]
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)

@@ -1,8 +1,9 @@
from typing import Any

from backend.constants import RequestKey
from rest_framework.serializers import ModelSerializer

from backend.constants import RequestKey


class AuditSerializer(ModelSerializer):
def create(self, validated_data: dict[str, Any]) -> Any:

@@ -11,7 +11,6 @@ import os
from django.conf import settings
from django.core.wsgi import get_wsgi_application
from dotenv import load_dotenv

from utils.log_events import start_server

load_dotenv()

@@ -1,16 +1,16 @@
import logging
from datetime import datetime

from connector_auth.constants import SocialAuthConstants
from connector_auth.models import ConnectorAuth
from django.db import models
import logging

logger = logging.getLogger(__name__)


class ConnectorAuthJSONField(models.JSONField):
def from_db_value(self, value, expression, connection): # type: ignore
""" Overrding default function. """
"""Overrding default function."""
metadata = super().from_db_value(value, expression, connection)
provider = metadata.get(SocialAuthConstants.PROVIDER)
uid = metadata.get(SocialAuthConstants.UID)

@@ -76,10 +76,10 @@ class ConnectorInstanceSerializer(AuditSerializer):
if SerializerUtils.check_context_for_GET_or_POST(context=self.context):
rep.pop(CIKey.CONNECTOR_AUTH)
# set icon fields for UI
rep[
ConnectorKeys.ICON
] = ConnectorProcessor.get_connector_data_with_key(
instance.connector_id, ConnectorKeys.ICON
rep[ConnectorKeys.ICON] = (
ConnectorProcessor.get_connector_data_with_key(
instance.connector_id, ConnectorKeys.ICON
)
)
encryption_secret: str = settings.ENCRYPTION_KEY
f: Fernet = Fernet(encryption_secret.encode("utf-8"))

@@ -45,7 +45,10 @@ class TestConnector(APITestCase):
"modified_by": 2,
"modified_at": "2023-06-14T05:28:47.759Z",
"connector_id": "e3a4512m-efgb-48d5-98a9-3983nd77f",
"connector_metadata": {"drive_link": "sample_url", "sharable_link": True},
"connector_metadata": {
"drive_link": "sample_url",
"sharable_link": True,
},
}
response = self.client.post(url, data, format="json")

@@ -200,9 +203,9 @@ class TestConnector(APITestCase):
},
}
response = self.client.put(url, data, format="json")
nested_value = response.data["connector_metadata"]["sample_metadata_json"][
"key1"
]
nested_value = response.data["connector_metadata"][
"sample_metadata_json"
]["key1"]

self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(nested_value, "value1")
@@ -226,9 +229,9 @@ class TestConnector(APITestCase):
},
}
response = self.client.put(url, data, format="json")
nested_value = response.data["connector_metadata"]["sample_metadata_json"][
"key1"
]
nested_value = response.data["connector_metadata"][
"sample_metadata_json"
]["key1"]
nested_list = response.data["connector_metadata"]["file_list"]
last_val = nested_list.pop()

@@ -293,7 +296,9 @@ class TestConnector(APITestCase):

self.assertEqual(
connector_id,
ConnectorInstance.objects.get(connector_id=connector_id).connector_id,
ConnectorInstance.objects.get(
connector_id=connector_id
).connector_id,
)

def test_connectors_update_json_field_patch(self) -> None:
@@ -304,7 +309,10 @@ class TestConnector(APITestCase):
"connector_metadata": {
"drive_link": "patch_update_url",
"sharable_link": True,
"sample_metadata_json": {"key1": "patch_update1", "key2": "value2"},
"sample_metadata_json": {
"key1": "patch_update1",
"key2": "value2",
},
}
}


@@ -5,7 +5,12 @@ from .views import ConnectorInstanceViewSet as CIViewSet

connector_list = CIViewSet.as_view({"get": "list", "post": "create"})
connector_detail = CIViewSet.as_view(
{"get": "retrieve", "put": "update", "patch": "partial_update", "delete": "destroy"}
{
"get": "retrieve",
"put": "update",
"patch": "partial_update",
"delete": "destroy",
}
)

urlpatterns = format_suffix_patterns(

@@ -2,7 +2,6 @@ import logging
from typing import Any, Optional

from account.custom_exceptions import DuplicateData
from backend.constants import RequestKey
from connector.constants import ConnectorInstanceKey as CIKey
from connector_auth.constants import ConnectorAuthKey
from connector_auth.exceptions import CacheMissException, MissingParamException
@@ -15,6 +14,8 @@ from rest_framework.response import Response
from rest_framework.versioning import URLPathVersioning
from utils.filtering import FilterHelper

from backend.constants import RequestKey

from .models import ConnectorInstance
from .serializers import ConnectorInstanceSerializer


@@ -41,7 +41,10 @@ class ConnectorAuth(AbstractUserSocialAuth):

id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
user = models.ForeignKey(
User, related_name="connector_auth", on_delete=models.SET_NULL, null=True
User,
related_name="connector_auth",
on_delete=models.SET_NULL,
null=True,
)

def __str__(self) -> str:
@@ -54,7 +57,10 @@ class ConnectorAuth(AbstractUserSocialAuth):

def set_extra_data(self, extra_data=None): # type: ignore
ConnectorAuth.check_credential_format(extra_data)
if extra_data[SocialAuthConstants.PROVIDER] == SocialAuthConstants.GOOGLE_OAUTH:
if (
extra_data[SocialAuthConstants.PROVIDER]
== SocialAuthConstants.GOOGLE_OAUTH
):
extra_data = GoogleAuthHelper.enrich_connector_metadata(extra_data)
return super().set_extra_data(extra_data)

@@ -67,13 +73,17 @@ class ConnectorAuth(AbstractUserSocialAuth):
backend = self.get_backend_instance(strategy)
if token and backend and hasattr(backend, "refresh_token"):
response = backend.refresh_token(token, *args, **kwargs)
extra_data = backend.extra_data(self, self.uid, response, self.extra_data)
extra_data = backend.extra_data(
self, self.uid, response, self.extra_data
)
extra_data[SocialAuthConstants.PROVIDER] = backend.name
extra_data[SocialAuthConstants.UID] = self.uid
if self.set_extra_data(extra_data): # type: ignore
self.save()

def get_and_refresh_tokens(self, request: Request = None) -> tuple[JSONField, bool]:
def get_and_refresh_tokens(
self, request: Request = None
) -> tuple[JSONField, bool]:
"""Uses Social Auth's ability to refresh tokens if necessary.

Returns:

@@ -13,7 +13,9 @@ from social_core.backends.oauth import BaseOAuth2
logger = logging.getLogger(__name__)


def check_user_exists(backend: BaseOAuth2, user: User, **kwargs: Any) -> dict[str, str]:
def check_user_exists(
backend: BaseOAuth2, user: User, **kwargs: Any
) -> dict[str, str]:
"""Checks if user is authenticated (will be handled in auth middleware,
present as a fail safe)

@@ -46,9 +48,12 @@ def cache_oauth_creds(
regarding expiry, uid (unique ID given by provider) and provider.
"""
cache_key = kwargs.get("cache_key") or backend.strategy.session_get(
settings.SOCIAL_AUTH_FIELDS_STORED_IN_SESSION[0], ConnectorAuthKey.OAUTH_KEY
settings.SOCIAL_AUTH_FIELDS_STORED_IN_SESSION[0],
ConnectorAuthKey.OAUTH_KEY,
)
extra_data = backend.extra_data(
user, uid, response, details, *args, **kwargs
)
extra_data = backend.extra_data(user, uid, response, details, *args, **kwargs)
extra_data[SocialAuthConstants.PROVIDER] = backend.name
extra_data[SocialAuthConstants.UID] = uid


@@ -1,11 +1,13 @@
from datetime import datetime, timedelta

from unstract.connectors.filesystems.google_drive.constants import GDriveConstants

from connector_auth.constants import SocialAuthConstants as AuthConstants
from connector_auth.exceptions import EnrichConnectorMetadataException
from connector_processor.constants import ConnectorKeys

from unstract.connectors.filesystems.google_drive.constants import (
GDriveConstants,
)


class GoogleAuthHelper:
@staticmethod
@@ -24,9 +26,9 @@ class GoogleAuthHelper:
)

# Used by Unstract
kwargs[
ConnectorKeys.PATH
] = GDriveConstants.ROOT_PREFIX # Acts as a prefix for all paths
kwargs[ConnectorKeys.PATH] = (
GDriveConstants.ROOT_PREFIX
) # Acts as a prefix for all paths
kwargs[AuthConstants.REFRESH_AFTER] = token_expiry.strftime(
AuthConstants.REFRESH_AFTER_FORMAT
)

@@ -15,6 +15,7 @@ from connector_processor.exceptions import (
TestConnectorException,
TestConnectorInputException,
)

from unstract.connectors.base import UnstractConnector
from unstract.connectors.connectorkit import Connectorkit
from unstract.connectors.enums import ConnectorMode

@@ -1,5 +1,6 @@
from backend.exceptions import UnstractBaseException
from rest_framework.exceptions import APIException

from backend.exceptions import UnstractBaseException
from unstract.connectors.exceptions import ConnectorError


@@ -1,6 +1,7 @@
from backend.constants import FieldLengthConstants as FLC
from rest_framework import serializers

from backend.constants import FieldLengthConstants as FLC


class TestConnectorSerializer(serializers.Serializer):
connector_id = serializers.CharField(max_length=FLC.CONNECTOR_ID_LENGTH)

@@ -6,7 +6,11 @@ from . import views
connector_test = ConnectorViewSet.as_view({"post": "test"})

urlpatterns = [
path("connector_schema/", views.get_connector_schema, name="get_connector_schema"),
path(
"connector_schema/",
views.get_connector_schema,
name="get_connector_schema",
),
path(
"supported_connectors/",
views.get_supported_connectors,

@@ -12,5 +12,9 @@ schema_view = get_schema_view(
)

urlpatterns = [
path("doc/", schema_view.with_ui("redoc", cache_timeout=0), name="schema-redoc"),
path(
"doc/",
schema_view.with_ui("redoc", cache_timeout=0),
name="schema-redoc",
),
]

@@ -2,6 +2,7 @@

This module defines the URL patterns for the feature_flags app.
"""

import feature_flag.views as views
from django.urls import path


@@ -3,12 +3,14 @@
Returns:
evaluate response
"""

import logging

from rest_framework import status
from rest_framework.decorators import api_view
from rest_framework.request import Request
from rest_framework.response import Response

from unstract.flags.client import EvaluationClient

logger = logging.getLogger(__name__)

@@ -7,8 +7,8 @@ class FileInformationKey:
FILE_UPLOAD_ALLOWED_EXT = ["pdf"]
FILE_UPLOAD_ALLOWED_MIME = ["application/pdf"]


class FileViewTypes:
ORIGINAL = "ORIGINAL"
EXTRACT = "EXTRACT"
SUMMARIZE = "SUMMARIZE"


@@ -126,9 +126,9 @@ class FileManagerHelper:
response = StreamingHttpResponse(
file, content_type=file_content_type
)
response[
"Content-Disposition"
] = f"attachment; filename={base_name}"
response["Content-Disposition"] = (
f"attachment; filename={base_name}"
)
return response
except ApiRequestError as exception:
FileManagerHelper.logger.error(
@@ -194,8 +194,7 @@ class FileManagerHelper:

elif file_content_type == "text/plain":
with fs.open(file_path, "r") as file:
FileManagerHelper.logger.info(
f"Reading text file: {file_path}")
FileManagerHelper.logger.info(f"Reading text file: {file_path}")
text_content = file.read()
return text_content
else:

@@ -145,12 +145,13 @@ class FileManagementViewSet(viewsets.ModelViewSet):

# Create a record in the db for the file
document = PromptStudioDocumentHelper.create(
tool_id=tool_id, document_name=file_name)
tool_id=tool_id, document_name=file_name
)
# Create a dictionary to store document data
doc = {
"document_id": document.document_id,
"document_name": document.document_name,
"tool": document.tool.tool_id
"tool": document.tool.tool_id,
}
# Store file
logger.info(
@@ -177,7 +178,7 @@ class FileManagementViewSet(viewsets.ModelViewSet):
tool_id: str = serializer.validated_data.get("tool_id")
view_type: str = serializer.validated_data.get("view_type")

filename_without_extension = file_name.rsplit('.', 1)[0]
filename_without_extension = file_name.rsplit(".", 1)[0]
if view_type == FileViewTypes.EXTRACT:
file_name = (
f"{FileViewTypes.EXTRACT.lower()}/"
@@ -189,20 +190,19 @@ class FileManagementViewSet(viewsets.ModelViewSet):
f"{filename_without_extension}.txt"
)

file_path = (
file_path
) = FileManagerHelper.handle_sub_directory_for_tenants(
request.org_id,
is_create=True,
user_id=request.user.user_id,
tool_id=tool_id,
file_path = file_path = (
FileManagerHelper.handle_sub_directory_for_tenants(
request.org_id,
is_create=True,
user_id=request.user.user_id,
tool_id=tool_id,
)
)
file_system = LocalStorageFS(settings={"path": file_path})
if not file_path.endswith("/"):
file_path += "/"
file_path += file_name
contents = FileManagerHelper.fetch_file_contents(
file_system, file_path)
contents = FileManagerHelper.fetch_file_contents(file_system, file_path)
return Response({"data": contents}, status=status.HTTP_200_OK)

@action(detail=True, methods=["get"])

@@ -3,4 +3,4 @@ ALTER ROLE unstract_dev SET default_transaction_isolation TO 'read committed';
ALTER ROLE unstract_dev SET timezone TO 'UTC';
ALTER USER unstract_dev CREATEDB;
GRANT ALL PRIVILEGES ON DATABASE unstract_db TO unstract_dev;
CREATE DATABASE unstract;
CREATE DATABASE unstract;

@@ -1,7 +1,6 @@
import logging
from typing import Any, Optional

from backend.constants import RequestHeader
from django.conf import settings
from django.urls import reverse
from pipeline.constants import PipelineKey, PipelineURL
@@ -11,9 +10,14 @@ from pipeline.pipeline_processor import PipelineProcessor
from rest_framework.request import Request
from rest_framework.response import Response
from utils.request.constants import RequestConstants
from workflow_manager.workflow.constants import WorkflowExecutionKey, WorkflowKey
from workflow_manager.workflow.constants import (
WorkflowExecutionKey,
WorkflowKey,
)
from workflow_manager.workflow.views import WorkflowViewSet

from backend.constants import RequestHeader

logger = logging.getLogger(__name__)


@@ -10,7 +10,12 @@ pipeline_list = PipelineViewSet.as_view(
}
)
pipeline_detail = PipelineViewSet.as_view(
{"get": "retrieve", "put": "update", "patch": "partial_update", "delete": "destroy"}
{
"get": "retrieve",
"put": "update",
"patch": "partial_update",
"delete": "destroy",
}
)

pipeline_execute = PipelineViewSet.as_view({"post": "execute"})

@@ -36,6 +36,7 @@ class InvalidRequest(APIException):
status_code = 401
default_detail = "Invalid Request"


class DuplicateData(APIException):
status_code = 400
default_detail = "Duplicate Data"

@@ -32,7 +32,9 @@ class PlatformAuthHelper:
)
raise error
if not auth_controller.is_admin_by_role(member.role):
logger.error("User is not having right access to perform this operation.")
logger.error(
"User is not having right access to perform this operation."
)
raise UserForbidden()
else:
pass

@@ -1,9 +1,10 @@
# serializers.py

from account.models import PlatformKey
from backend.serializers import AuditSerializer
from rest_framework import serializers

from backend.serializers import AuditSerializer


class PlatformKeySerializer(AuditSerializer):
class Meta:

@@ -7,7 +7,13 @@ from rest_framework.request import Request
from rest_framework.response import Response

from .auth_helper import AuthHelper
from .dto import AuthOrganization, ResetUserPasswordDto, TokenData, User, UserInfo
from .dto import (
AuthOrganization,
ResetUserPasswordDto,
TokenData,
User,
UserInfo,
)
from .enums import Region
from .exceptions import MethodNotImplemented

@@ -36,7 +42,10 @@ class AuthService(ABC):
self, user: User, token: Optional[dict[str, Any]] = None
) -> Optional[UserInfo]:
return UserInfo(
id=user.id, name=user.username, display_name=user.username, email=user.email
id=user.id,
name=user.username,
display_name=user.username,
email=user.email,
)

def get_organization_info(self, org_id: str) -> Any:
@@ -64,7 +73,9 @@ class AuthService(ABC):
def get_user_id_from_token(self, token: dict[str, Any]) -> Response:
return token["userinfo"]["sub"]

def get_organization_members_by_org_id(self, organization_id: str) -> Response:
def get_organization_members_by_org_id(
self, organization_id: str
) -> Response:
raise MethodNotImplemented()

def reset_user_password(self, user: User) -> ResetUserPasswordDto:

@@ -1,10 +1,11 @@
from typing import Any

from backend.serializers import AuditSerializer
from project.models import Project
from workflow_manager.workflow.constants import WorkflowKey
from workflow_manager.workflow.serializers import WorkflowSerializer

from backend.serializers import AuditSerializer


class ProjectSerializer(AuditSerializer):
class Meta:

@@ -79,7 +79,8 @@ class TestProjects(APITestCase):

self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(
project_name, Project.objects.get(project_name=project_name).project_name
project_name,
Project.objects.get(project_name=project_name).project_name,
)

def test_projects_update_pk(self) -> None:
@@ -107,7 +108,8 @@ class TestProjects(APITestCase):
project_name = response.data["project_name"]

self.assertEqual(
project_name, Project.objects.get(project_name=project_name).project_name
project_name,
Project.objects.get(project_name=project_name).project_name,
)

def test_projects_delete(self) -> None:

@@ -5,19 +5,30 @@ from .views import ProjectViewSet

project_list = ProjectViewSet.as_view({"get": "list", "post": "create"})
project_detail = ProjectViewSet.as_view(
{"get": "retrieve", "put": "update", "patch": "partial_update", "delete": "destroy"}
{
"get": "retrieve",
"put": "update",
"patch": "partial_update",
"delete": "destroy",
}
)

project_settings = ProjectViewSet.as_view(
{"get": "project_settings", "put": "project_settings"}
)
project_settings_schema = ProjectViewSet.as_view({"get": "project_settings_schema"})
project_settings_schema = ProjectViewSet.as_view(
{"get": "project_settings_schema"}
)

urlpatterns = format_suffix_patterns(
[
path("projects/", project_list, name="projects-list"),
path("projects/<uuid:pk>/", project_detail, name="projects-detail"),
path("projects/<uuid:pk>/settings/", project_settings, name="project-settings"),
path(
"projects/<uuid:pk>/settings/",
project_settings,
name="project-settings",
),
path(
"projects/settings/",
project_settings_schema,

@@ -1,10 +1,10 @@
import pytest

from django.core.management import call_command

@pytest.fixture(scope='session')

@pytest.fixture(scope="session")
def django_db_setup(django_db_blocker):

fixtures = ["./prompt/tests/fixtures/prompts_001.json"]
with django_db_blocker.unblock():
call_command('loaddata', *fixtures)
call_command("loaddata", *fixtures)

@@ -1,10 +1,9 @@
import pytest
from django.urls import reverse
from prompt.models import Prompt
from rest_framework import status
from rest_framework.test import APITestCase

from prompt.models import Prompt

pytestmark = pytest.mark.django_db


@@ -27,8 +26,9 @@ class TestPrompts(APITestCase):

def test_prompts_detail_throw_404(self):
"""Tests whether a 404 error is thrown on retrieving a prompt."""
url = reverse("prompts-detail",
kwargs={"pk": 200}) # Prompt doesn't exist
url = reverse(
"prompts-detail", kwargs={"pk": 200}
) # Prompt doesn't exist
response = self.client.get(url)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)


@@ -1,20 +1,21 @@
from django.urls import path
from rest_framework.urlpatterns import format_suffix_patterns

from .views import PromptViewSet

prompt_list = PromptViewSet.as_view({
'get': 'list',
'post': 'create'
})
prompt_detail = PromptViewSet.as_view({
'get': 'retrieve',
'put': 'update',
'patch': 'partial_update',
'delete': 'destroy'
})

urlpatterns = format_suffix_patterns([
path('prompt/', prompt_list, name='prompt-list'),
path('prompt/<uuid:pk>/', prompt_detail, name='prompt-detail'),
])
prompt_list = PromptViewSet.as_view({"get": "list", "post": "create"})
prompt_detail = PromptViewSet.as_view(
{
"get": "retrieve",
"put": "update",
"patch": "partial_update",
"delete": "destroy",
}
)

urlpatterns = format_suffix_patterns(
[
path("prompt/", prompt_list, name="prompt-list"),
path("prompt/<uuid:pk>/", prompt_detail, name="prompt-detail"),
]
)

@@ -18,7 +18,7 @@ class Migration(migrations.Migration):
(
"prompt_studio_core",
"0007_remove_customtool_default_profile_and_more",
)
),
]

def MigrateProfileManager(apps: Any, schema_editor: Any) -> None:

@@ -6,7 +6,10 @@ from django.db import migrations, models

class Migration(migrations.Migration):
dependencies = [
("prompt_studio_core", "0008_customtool_exclude_failed_customtool_monitor_llm"),
(
"prompt_studio_core",
"0008_customtool_exclude_failed_customtool_monitor_llm",
),
("prompt_profile_manager", "0008_profilemanager_migration"),
]


@@ -22,19 +22,19 @@ class ProfileManagerSerializer(AuditSerializer):
vector_db = rep[ProfileManagerKeys.VECTOR_STORE]
x2text = rep[ProfileManagerKeys.X2TEXT]
if llm:
rep[
ProfileManagerKeys.LLM
] = AdapterProcessor.get_adapter_instance_by_id(llm)
rep[ProfileManagerKeys.LLM] = (
AdapterProcessor.get_adapter_instance_by_id(llm)
)
if embedding:
rep[
ProfileManagerKeys.EMBEDDING_MODEL
] = AdapterProcessor.get_adapter_instance_by_id(embedding)
rep[ProfileManagerKeys.EMBEDDING_MODEL] = (
AdapterProcessor.get_adapter_instance_by_id(embedding)
)
if vector_db:
rep[
ProfileManagerKeys.VECTOR_STORE
] = AdapterProcessor.get_adapter_instance_by_id(vector_db)
rep[ProfileManagerKeys.VECTOR_STORE] = (
AdapterProcessor.get_adapter_instance_by_id(vector_db)
)
if x2text:
rep[
ProfileManagerKeys.X2TEXT
] = AdapterProcessor.get_adapter_instance_by_id(x2text)
rep[ProfileManagerKeys.X2TEXT] = (
AdapterProcessor.get_adapter_instance_by_id(x2text)
)
return rep

@@ -45,7 +45,9 @@ class ProfileManagerView(viewsets.ModelViewSet):
def create(
self, request: HttpRequest, *args: tuple[Any], **kwargs: dict[str, Any]
) -> Response:
serializer: ProfileManagerSerializer = self.get_serializer(data=request.data)
serializer: ProfileManagerSerializer = self.get_serializer(
data=request.data
)
# Overriding default exception behaviour
# TO DO : Handle model related exceptions.
serializer.is_valid(raise_exception=True)

@@ -6,43 +6,43 @@ from django.db import migrations, models
class Migration(migrations.Migration):

dependencies = [
('prompt_studio', '0001_initial'),
("prompt_studio", "0001_initial"),
]

operations = [
migrations.AddField(
model_name='toolstudioprompt',
name='eval_guidance_completeness',
model_name="toolstudioprompt",
name="eval_guidance_completeness",
field=models.BooleanField(default=True),
),
migrations.AddField(
model_name='toolstudioprompt',
name='eval_guidance_toxicity',
model_name="toolstudioprompt",
name="eval_guidance_toxicity",
field=models.BooleanField(default=True),
),
migrations.AddField(
model_name='toolstudioprompt',
name='eval_quality_correctness',
model_name="toolstudioprompt",
name="eval_quality_correctness",
field=models.BooleanField(default=True),
),
migrations.AddField(
model_name='toolstudioprompt',
name='eval_quality_faithfulness',
model_name="toolstudioprompt",
name="eval_quality_faithfulness",
field=models.BooleanField(default=True),
),
migrations.AddField(
model_name='toolstudioprompt',
name='eval_quality_relevance',
model_name="toolstudioprompt",
name="eval_quality_relevance",
field=models.BooleanField(default=True),
),
migrations.AddField(
model_name='toolstudioprompt',
name='eval_security_pii',
model_name="toolstudioprompt",
name="eval_security_pii",
field=models.BooleanField(default=True),
),
migrations.AddField(
model_name='toolstudioprompt',
name='evaluate',
model_name="toolstudioprompt",
name="evaluate",
field=models.BooleanField(default=True),
),
]

@@ -1,6 +1,7 @@
from backend.serializers import AuditSerializer
from rest_framework import serializers

from backend.serializers import AuditSerializer

from .models import ToolStudioPrompt


@@ -5,7 +5,10 @@ from django.db import migrations, models

class Migration(migrations.Migration):
dependencies = [
("prompt_studio_core", "0005_alter_customtool_default_profile_and_more"),
(
"prompt_studio_core",
"0005_alter_customtool_default_profile_and_more",
),
]

operations = [
@@ -13,7 +16,8 @@ class Migration(migrations.Migration):
model_name="customtool",
name="summarize_as_source",
field=models.BooleanField(
db_comment="Flag to use summarized content as source", default=False
db_comment="Flag to use summarized content as source",
default=False,
),
),
migrations.AlterField(

@@ -48,12 +48,12 @@ class CustomTool(BaseModel):
preamble = models.TextField(
blank=True,
db_comment="Preamble to the prompts",
default=DefaultPrompts.PREAMBLE
default=DefaultPrompts.PREAMBLE,
)
postamble = models.TextField(
blank=True,
db_comment="Appended as postable to prompts.",
default=DefaultPrompts.POSTAMBLE
default=DefaultPrompts.POSTAMBLE,
)
prompt_grammer = models.JSONField(
null=True, blank=True, db_comment="Synonymous words used in prompt"

@@ -184,9 +184,9 @@ class PromptStudioHelper:
Returns:
List[ToolStudioPrompt]: List of instance of the model
"""
prompt_instances: list[
ToolStudioPrompt
] = ToolStudioPrompt.objects.filter(tool_id=tool_id)
prompt_instances: list[ToolStudioPrompt] = (
ToolStudioPrompt.objects.filter(tool_id=tool_id)
)
return prompt_instances

@staticmethod
@@ -509,9 +509,9 @@ class PromptStudioHelper:
)

output: dict[str, Any] = {}
output[
TSPKeys.ASSERTION_FAILURE_PROMPT
] = prompt.assertion_failure_prompt
output[TSPKeys.ASSERTION_FAILURE_PROMPT] = (
prompt.assertion_failure_prompt
)
output[TSPKeys.ASSERT_PROMPT] = prompt.assert_prompt
output[TSPKeys.IS_ASSERT] = prompt.is_assert
output[TSPKeys.PROMPT] = prompt.prompt
@@ -526,12 +526,12 @@ class PromptStudioHelper:
output[TSPKeys.GRAMMAR] = grammar_list
output[TSPKeys.TYPE] = prompt.enforce_type
output[TSPKeys.NAME] = prompt.prompt_key
output[
TSPKeys.RETRIEVAL_STRATEGY
] = prompt.profile_manager.retrieval_strategy
output[
TSPKeys.SIMILARITY_TOP_K
] = prompt.profile_manager.similarity_top_k
output[TSPKeys.RETRIEVAL_STRATEGY] = (
prompt.profile_manager.retrieval_strategy
)
output[TSPKeys.SIMILARITY_TOP_K] = (
prompt.profile_manager.similarity_top_k
)
output[TSPKeys.SECTION] = prompt.profile_manager.section
output[TSPKeys.X2TEXT_ADAPTER] = x2text
# Eval settings for the prompt
@@ -547,9 +547,9 @@ class PromptStudioHelper:
] = tool.exclude_failed
output[TSPKeys.ENABLE_CHALLENGE] = tool.enable_challenge
output[TSPKeys.CHALLENGE_LLM] = challenge_llm
output[
TSPKeys.SINGLE_PASS_EXTRACTION_MODE
] = tool.single_pass_extraction_mode
output[TSPKeys.SINGLE_PASS_EXTRACTION_MODE] = (
tool.single_pass_extraction_mode
)
for attr in dir(prompt):
if attr.startswith(TSPKeys.EVAL_METRIC_PREFIX):
attr_val = getattr(prompt, attr)

@@ -131,9 +131,9 @@ class PromptStudioCoreView(viewsets.ModelViewSet):
Response: Reponse of dropdown dict
"""
try:
select_choices: dict[
str, Any
] = PromptStudioHelper.get_select_fields()
select_choices: dict[str, Any] = (
PromptStudioHelper.get_select_fields()
)
return Response(select_choices, status=status.HTTP_200_OK)
except Exception as e:
logger.error(f"Error occured while fetching select fields {e}")

@@ -2,4 +2,4 @@ from django.apps import AppConfig


class PromptStudioDocumentManagerConfig(AppConfig):
name = 'prompt_studio.prompt_studio_document_manager'
name = "prompt_studio.prompt_studio_document_manager"

@@ -12,7 +12,10 @@ class Migration(migrations.Migration):

dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
("prompt_studio_core", "0007_remove_customtool_default_profile_and_more"),
(
"prompt_studio_core",
"0007_remove_customtool_default_profile_and_more",
),
]

operations = [
@@ -33,7 +36,8 @@ class Migration(migrations.Migration):
(
"document_name",
models.CharField(
db_comment="Field to store the document name", editable=False
db_comment="Field to store the document name",
editable=False,
),
),
(
@@ -71,7 +75,8 @@ class Migration(migrations.Migration):
migrations.AddConstraint(
model_name="documentmanager",
constraint=models.UniqueConstraint(
fields=("document_name", "tool"), name="unique_document_name_tool"
fields=("document_name", "tool"),
name="unique_document_name_tool",
),
),
]

@@ -1,3 +1,3 @@
class IndexManagerKeys:
PROFILE_MANAGER = "profile_manager"
DOCUMENT_MANAGER = "document_manager"
DOCUMENT_MANAGER = "document_manager"

@@ -52,7 +52,8 @@ class Migration(migrations.Migration):
(
"index_ids_history",
models.JSONField(
db_comment="List of index ids", default=list),
db_comment="List of index ids", default=list
),
),
(
"created_by",

@@ -4,4 +4,3 @@ from rest_framework.exceptions import APIException
class InternalError(APIException):
status_code = 400
default_detail = "Internal service error."


@@ -1,9 +1,10 @@
# Generated by Django 4.2.1 on 2024-02-07 11:20

import uuid

import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import uuid


class Migration(migrations.Migration):
@@ -31,7 +32,10 @@ class Migration(migrations.Migration):
serialize=False,
),
),
("output", models.CharField(db_comment="Field to store output")),
(
"output",
models.CharField(db_comment="Field to store output"),
),
(
"created_by",
models.ForeignKey(

@@ -1,12 +1,15 @@
# Generated by Django 4.2.1 on 2024-02-07 19:53

from django.db import migrations, models
import django.utils.timezone
from django.db import migrations, models


class Migration(migrations.Migration):
dependencies = [
("prompt_studio_output_manager", "0002_promptstudiooutputmanager_doc_name"),
(
"prompt_studio_output_manager",
"0002_promptstudiooutputmanager_doc_name",
),
]

operations = [

@@ -15,6 +15,8 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name="promptstudiooutputmanager",
name="doc_name",
field=models.CharField(db_comment="Field to store the document name"),
field=models.CharField(
db_comment="Field to store the document name"
),
),
]

@@ -1,7 +1,7 @@
# Generated by Django 4.2.1 on 2024-02-07 20:53

from django.db import migrations, models
import django.db.models.deletion
from django.db import migrations, models


class Migration(migrations.Migration):

@@ -5,34 +5,39 @@ from django.db import migrations, models

def delete_duplicates_and_nulls(apps, schema_editor):
prompt_studio_output_manager = apps.get_model(
"prompt_studio_output_manager", "PromptStudioOutputManager")
"prompt_studio_output_manager", "PromptStudioOutputManager"
)

# Delete rows where prompt_id, document_manager, profile_manager, or tool_id is NULL
prompt_studio_output_manager.objects.filter(
models.Q(prompt_id=None) |
models.Q(document_manager=None) |
models.Q(profile_manager=None) |
models.Q(tool_id=None)
models.Q(prompt_id=None)
| models.Q(document_manager=None)
| models.Q(profile_manager=None)
| models.Q(tool_id=None)
).delete()

# Find duplicate rows based on unique constraint fields and count their occurrences
duplicates = prompt_studio_output_manager.objects.values(
'prompt_id', 'document_manager', 'profile_manager', 'tool_id'
).annotate(
count=models.Count('prompt_output_id')
).filter(
count__gt=1 # Filter to only get rows that have duplicates
duplicates = (
prompt_studio_output_manager.objects.values(
"prompt_id", "document_manager", "profile_manager", "tool_id"
)
.annotate(count=models.Count("prompt_output_id"))
.filter(count__gt=1) # Filter to only get rows that have duplicates
)

# Iterate over each set of duplicates found
for duplicate in duplicates:
# Find all instances of duplicates for the current set
pks = prompt_studio_output_manager.objects.filter(
prompt_id=duplicate['prompt_id'],
document_manager=duplicate['document_manager'],
profile_manager=duplicate['profile_manager'],
tool_id=duplicate['tool_id']
).order_by('-created_at').values_list('pk')[1:] # Order by created_at descending and skip the first one (keep the latest)
pks = (
prompt_studio_output_manager.objects.filter(
prompt_id=duplicate["prompt_id"],
document_manager=duplicate["document_manager"],
profile_manager=duplicate["profile_manager"],
tool_id=duplicate["tool_id"],
)
.order_by("-created_at")
.values_list("pk")[1:]
) # Order by created_at descending and skip the first one (keep the latest)

# Delete the duplicate rows
prompt_studio_output_manager.objects.filter(pk__in=pks).delete()
@@ -47,6 +52,7 @@ class Migration(migrations.Migration):
]

operations = [
migrations.RunPython(delete_duplicates_and_nulls,
reverse_code=migrations.RunPython.noop),
migrations.RunPython(
delete_duplicates_and_nulls, reverse_code=migrations.RunPython.noop
),
]

@@ -6,8 +6,14 @@ from django.db import migrations, models
 
 class Migration(migrations.Migration):
     dependencies = [
-        ("prompt_studio_core", "0008_customtool_exclude_failed_customtool_monitor_llm"),
-        ("prompt_profile_manager", "0009_alter_profilemanager_prompt_studio_tool"),
+        (
+            "prompt_studio_core",
+            "0008_customtool_exclude_failed_customtool_monitor_llm",
+        ),
+        (
+            "prompt_profile_manager",
+            "0009_alter_profilemanager_prompt_studio_tool",
+        ),
         ("prompt_studio", "0006_alter_toolstudioprompt_prompt_key_and_more"),
         ("prompt_studio_output_manager", "0010_delete_duplicate_rows"),
     ]
 
@@ -17,7 +23,8 @@ class Migration(migrations.Migration):
             model_name="promptstudiooutputmanager",
             name="is_single_pass_extract",
             field=models.BooleanField(
-                db_comment="Is the single pass extraction mode active", default=False
+                db_comment="Is the single pass extraction mode active",
+                default=False,
             ),
         ),
         migrations.AlterField(
 
@@ -72,8 +72,13 @@ class PromptStudioOutputManager(BaseModel):
     class Meta:
         constraints = [
             models.UniqueConstraint(
-                fields=["prompt_id", "document_manager", "profile_manager",
-                        "tool_id", "is_single_pass_extract"],
+                fields=[
+                    "prompt_id",
+                    "document_manager",
+                    "profile_manager",
+                    "tool_id",
+                    "is_single_pass_extract",
+                ],
                 name="unique_prompt_output",
             ),
         ]
 
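Once this constraint exists, a second row with the same field combination is rejected by the database itself, not just by application code. A minimal sketch of the failure mode, reduced to a two-field model with invented names (it assumes a configured Django project):

    from django.db import IntegrityError, models, transaction

    class OutputDemo(models.Model):
        prompt_id = models.CharField(max_length=64)
        doc_id = models.CharField(max_length=64)

        class Meta:
            app_label = "demo"  # needed only because this sketch is standalone
            constraints = [
                models.UniqueConstraint(
                    fields=["prompt_id", "doc_id"], name="unique_output_demo"
                )
            ]

    def save_once(prompt_id: str, doc_id: str) -> bool:
        try:
            with transaction.atomic():
                OutputDemo.objects.create(prompt_id=prompt_id, doc_id=doc_id)
            return True
        except IntegrityError:
            # The duplicate insert violates unique_output_demo at the DB level.
            return False
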
@@ -7,4 +7,3 @@ class PromptStudioOutputSerializer(AuditSerializer):
     class Meta:
         model = PromptStudioOutputManager
         fields = "__all__"
-
 
@@ -43,9 +43,9 @@ class PromptStudioOutputView(viewsets.ModelViewSet):
                 is_single_pass_extract_param
             )
 
-            filter_args[
-                PromptStudioOutputManagerKeys.IS_SINGLE_PASS_EXTRACT
-            ] = is_single_pass_extract
+            filter_args[PromptStudioOutputManagerKeys.IS_SINGLE_PASS_EXTRACT] = (
+                is_single_pass_extract
+            )
 
         if filter_args:
             queryset = PromptStudioOutputManager.objects.filter(**filter_args)
 
@@ -115,10 +115,10 @@ class PromptStudioRegistryHelper:
             PromptStudioRegistryHelper.frame_properties(tool=custom_tool)
         )
         spec: Spec = PromptStudioRegistryHelper.frame_spec(tool=custom_tool)
-        prompts: list[
-            ToolStudioPrompt
-        ] = PromptStudioHelper.fetch_prompt_from_tool(
-            tool_id=custom_tool.tool_id
+        prompts: list[ToolStudioPrompt] = (
+            PromptStudioHelper.fetch_prompt_from_tool(
+                tool_id=custom_tool.tool_id
+            )
         )
         metadata = PromptStudioRegistryHelper.frame_export_json(
             tool=custom_tool, prompts=prompts
 
@@ -195,9 +195,9 @@ class PromptStudioRegistryHelper:
             adapter_id = str(prompt.profile_manager.embedding_model.adapter_id)
             embedding_suffix = adapter_id.split("|")[0]
 
-            output[
-                JsonSchemaKey.ASSERTION_FAILURE_PROMPT
-            ] = prompt.assertion_failure_prompt
+            output[JsonSchemaKey.ASSERTION_FAILURE_PROMPT] = (
+                prompt.assertion_failure_prompt
+            )
             output[JsonSchemaKey.ASSERT_PROMPT] = prompt.assert_prompt
             output[JsonSchemaKey.IS_ASSERT] = prompt.is_assert
             output[JsonSchemaKey.PROMPT] = prompt.prompt
 
@@ -206,21 +206,21 @@ class PromptStudioRegistryHelper:
             output[JsonSchemaKey.VECTOR_DB] = vector_db
             output[JsonSchemaKey.EMBEDDING] = embedding_model
             output[JsonSchemaKey.X2TEXT_ADAPTER] = x2text
-            output[
-                JsonSchemaKey.CHUNK_OVERLAP
-            ] = prompt.profile_manager.chunk_overlap
+            output[JsonSchemaKey.CHUNK_OVERLAP] = (
+                prompt.profile_manager.chunk_overlap
+            )
             output[JsonSchemaKey.LLM] = llm
             output[JsonSchemaKey.PREAMBLE] = tool.preamble
             output[JsonSchemaKey.POSTAMBLE] = tool.postamble
             output[JsonSchemaKey.GRAMMAR] = grammar_list
             output[JsonSchemaKey.TYPE] = prompt.enforce_type
             output[JsonSchemaKey.NAME] = prompt.prompt_key
-            output[
-                JsonSchemaKey.RETRIEVAL_STRATEGY
-            ] = prompt.profile_manager.retrieval_strategy
-            output[
-                JsonSchemaKey.SIMILARITY_TOP_K
-            ] = prompt.profile_manager.similarity_top_k
+            output[JsonSchemaKey.RETRIEVAL_STRATEGY] = (
+                prompt.profile_manager.retrieval_strategy
+            )
+            output[JsonSchemaKey.SIMILARITY_TOP_K] = (
+                prompt.profile_manager.similarity_top_k
+            )
             output[JsonSchemaKey.SECTION] = prompt.profile_manager.section
             output[JsonSchemaKey.REINDEX] = prompt.profile_manager.reindex
             output[JsonSchemaKey.EMBEDDING_SUFFIX] = embedding_suffix
 
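Nearly every hunk in this part of the commit is the same mechanical change: the newer black style run by this repo's hooks appears to parenthesize a long right-hand side rather than split a subscript target across lines. A runnable before/after illustration with invented names:

    config: dict[str, int] = {}
    a_rather_long_value_pulled_from_somewhere_else = 8

    # Before: the subscript target itself was split to satisfy the line limit.
    config[
        "similarity_top_k"
    ] = a_rather_long_value_pulled_from_somewhere_else

    # After: the target stays on one line; the value is wrapped in parentheses.
    config["similarity_top_k"] = (
        a_rather_long_value_pulled_from_somewhere_else
    )
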
@@ -1,6 +1,7 @@
-from backend.serializers import AuditSerializer
 from rest_framework import serializers
 
+from backend.serializers import AuditSerializer
+
 from .models import PromptStudioRegistry
 
 
@@ -1,12 +1,13 @@
 import logging
 from typing import Any
 
-from backend.constants import FieldLengthConstants as FieldLength
 from django.conf import settings
 from pipeline.manager import PipelineManager
 from rest_framework import serializers
 from scheduler.constants import SchedulerConstants as SC
 
+from backend.constants import FieldLengthConstants as FieldLength
+
 logger = logging.getLogger(__name__)
 
 JOB_NAME_LENGTH = 255
 
@@ -24,7 +25,9 @@ class JobKwargsSerializer(serializers.Serializer):
 
 class SchedulerKwargsSerializer(serializers.Serializer):
     coalesce = serializers.BooleanField()
-    misfire_grace_time = serializers.IntegerField(allow_null=True, required=False)
+    misfire_grace_time = serializers.IntegerField(
+        allow_null=True, required=False
+    )
     max_instances = serializers.IntegerField()
     replace_existing = serializers.BooleanField()
 
@@ -41,10 +44,10 @@ class AddJobSerializer(serializers.Serializer):
     def to_internal_value(self, data: dict[str, Any]) -> dict[str, Any]:
         if SC.NAME not in data:
             data[SC.NAME] = f"Job-{data[SC.ID]}"
-        data[
-            SC.JOB_KWARGS
-        ] = PipelineManager.get_pipeline_execution_data_for_scheduled_run(
-            pipeline_id=data[SC.ID]
+        data[SC.JOB_KWARGS] = (
+            PipelineManager.get_pipeline_execution_data_for_scheduled_run(
+                pipeline_id=data[SC.ID]
+            )
         )
         data[SC.SCHEDULER_KWARGS] = settings.SCHEDULER_KWARGS
         return super().to_internal_value(data)  # type: ignore
 
@@ -15,8 +15,10 @@ class InvitationViewSet(viewsets.ViewSet):
     @action(detail=False, methods=["GET"])
     def list_invitations(self, request: Request) -> Response:
         auth_controller = AuthenticationController()
-        invitations: list[MemberInvitation] = auth_controller.get_user_invitations(
-            organization_id=request.org_id,
+        invitations: list[MemberInvitation] = (
+            auth_controller.get_user_invitations(
+                organization_id=request.org_id,
+            )
         )
         serialized_members = ListInvitationsResponseSerializer(
             invitations, many=True
 
@@ -5,7 +5,10 @@ from django.db import models
 
 class OrganizationMember(models.Model):
     member_id = models.BigAutoField(primary_key=True)
     user = models.OneToOneField(
-        User, on_delete=models.CASCADE, default=None, related_name="organization_member"
+        User,
+        on_delete=models.CASCADE,
+        default=None,
+        related_name="organization_member",
     )
     role = models.CharField()
 
@@ -79,7 +79,8 @@ class OrganizationUserViewSet(viewsets.ViewSet):
             # z_code = request.COOKIES.get(Cookie.Z_CODE)
             user_info = auth_controller.get_user_info(request)
             role = auth_controller.get_organization_members_by_user(
-                request.user)
+                request.user
+            )
             if not user_info:
                 return Response(
                     status=status.HTTP_404_NOT_FOUND,
 
@@ -89,10 +90,10 @@ class OrganizationUserViewSet(viewsets.ViewSet):
             # Temporary fix for getting user role along with user info.
             # Proper implementation would be adding role field to UserInfo.
             serialized_user_info["is_admin"] = auth_controller.is_admin_by_role(
-                role.role)
+                role.role
+            )
             return Response(
-                status=status.HTTP_200_OK, data={
-                    "user": serialized_user_info}
+                status=status.HTTP_200_OK, data={"user": serialized_user_info}
             )
         except Exception as error:
             Logger.error(f"Error while get User : {error}")
 
@@ -112,7 +113,6 @@ class OrganizationUserViewSet(viewsets.ViewSet):
             )
 
             response_serializer = UserInviteResponseSerializer(
-
                 invite_response, many=True
             )
 
@@ -157,11 +157,12 @@ class OrganizationUserViewSet(viewsets.ViewSet):
     def get_organization_members(self, request: Request) -> Response:
         auth_controller = AuthenticationController()
         if request.org_id:
-            members: list[
-                OrganizationMember
-            ] = auth_controller.get_organization_members_by_org_id()
+            members: list[OrganizationMember] = (
+                auth_controller.get_organization_members_by_org_id()
+            )
             serialized_members = OrganizationMemberSerializer(
-                members, many=True).data
+                members, many=True
+            ).data
             return Response(
                 status=status.HTTP_200_OK,
                 data={"message": "success", "members": serialized_members},
 
@@ -37,7 +37,9 @@ def get_roles(request: Request) -> Response:
 @api_view(["POST"])
 def reset_password(request: Request) -> Response:
     auth_controller = AuthenticationController()
-    data: ResetUserPasswordDto = auth_controller.reset_user_password(request.user)
+    data: ResetUserPasswordDto = auth_controller.reset_user_password(
+        request.user
+    )
     if data.status:
         return Response(
             status=status.HTTP_200_OK,
 
@@ -60,7 +60,7 @@ class ToolProcessor:
         )
         schema_json: dict[str, Any] = schema.to_dict()
         return schema_json
-
+
     @staticmethod
     def update_schema_with_adapter_configurations(
         schema: Spec, user: User
 
@@ -134,12 +134,12 @@ class ToolProcessor:
     def get_tool_list(user: User) -> list[dict[str, Any]]:
         """Function to get a list of tools."""
         tool_registry = ToolRegistry()
-        prompt_studio_tools: list[
-            dict[str, Any]
-        ] = PromptStudioRegistryHelper.fetch_json_for_registry(user)
-        tool_list: list[
-            dict[str, Any]
-        ] = tool_registry.fetch_tools_descriptions()
+        prompt_studio_tools: list[dict[str, Any]] = (
+            PromptStudioRegistryHelper.fetch_json_for_registry(user)
+        )
+        tool_list: list[dict[str, Any]] = (
+            tool_registry.fetch_tools_descriptions()
+        )
         tool_list = tool_list + prompt_studio_tools
         return tool_list
 
@@ -3,7 +3,6 @@ import uuid
 from typing import Any
 
 from account.custom_exceptions import DuplicateData
-from backend.constants import RequestKey
 from django.db import IntegrityError
 from django.db.models.query import QuerySet
 from rest_framework import serializers, status, viewsets
 
@@ -14,7 +13,10 @@ from rest_framework.versioning import URLPathVersioning
 from tool_instance.constants import ToolInstanceErrors
 from tool_instance.constants import ToolInstanceKey as TIKey
 from tool_instance.constants import ToolKey
-from tool_instance.exceptions import FetchToolListFailed, ToolFunctionIsMandatory
+from tool_instance.exceptions import (
+    FetchToolListFailed,
+    ToolFunctionIsMandatory,
+)
 from tool_instance.models import ToolInstance
 from tool_instance.serializers import (
     ToolInstanceReorderSerializer as TIReorderSerializer,
 
@@ -25,6 +27,8 @@ from tool_instance.tool_processor import ToolProcessor
 from utils.filtering import FilterHelper
 from workflow_manager.workflow.constants import WorkflowKey
 
+from backend.constants import RequestKey
+
 logger = logging.getLogger(__name__)
 
 
@@ -51,7 +55,8 @@ def get_tool_list(request: Request) -> Response:
     try:
         logger.info("Fetching tools from the tool registry...")
         return Response(
-            data=ToolProcessor.get_tool_list(request.user), status=status.HTTP_200_OK
+            data=ToolProcessor.get_tool_list(request.user),
+            status=status.HTTP_200_OK,
         )
     except Exception as exc:
         logger.error(f"Failed to fetch tools: {exc}")
 
@@ -117,10 +122,10 @@ class ToolInstanceViewSet(viewsets.ModelViewSet):
             instance (ToolInstance): Instance being deleted.
         """
         lookup = {"step__gt": instance.step}
-        next_tool_instances: list[
-            ToolInstance
-        ] = ToolInstanceHelper.get_tool_instances_by_workflow(
-            instance.workflow.id, TIKey.STEP, lookup=lookup
+        next_tool_instances: list[ToolInstance] = (
+            ToolInstanceHelper.get_tool_instances_by_workflow(
+                instance.workflow.id, TIKey.STEP, lookup=lookup
+            )
         )
         super().perform_destroy(instance)
 
@@ -1,7 +1,7 @@
 import os
 import threading
-from typing import Any
 from enum import Enum
+from typing import Any
 
 
 class ConcurrencyMode(Enum):
 
@@ -14,10 +14,8 @@ class Exceptions:
 
 
 class StateStore:
-
-    mode = os.environ.get(
-        "CONCURRENCY_MODE", ConcurrencyMode.THREAD
-    )
-
+
+    mode = os.environ.get("CONCURRENCY_MODE", ConcurrencyMode.THREAD)
+
     # Thread-safe storage.
     thread_local = threading.local()
 
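`StateStore` keeps its per-request state on `threading.local()`, so concurrent threads never see each other's values. A self-contained sketch of that pattern; the accessor names here are illustrative, not taken from the file:

    import threading

    class LocalStateStore:
        """Per-thread key/value store, illustrating the StateStore pattern."""

        _local = threading.local()

        @classmethod
        def set(cls, key: str, value: object) -> None:
            setattr(cls._local, key, value)

        @classmethod
        def get(cls, key: str, default: object = None) -> object:
            return getattr(cls._local, key, default)

    def worker(name: str) -> None:
        LocalStateStore.set("request_id", name)
        # Each thread reads back only the value it wrote.
        assert LocalStateStore.get("request_id") == name

    threads = [threading.Thread(target=worker, args=(f"t{i}",)) for i in range(3)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()
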
@@ -28,11 +28,15 @@ def make_http_request(
     if verb == HTTPMethod.GET:
         response = pyrequests.get(url, params=params, headers=headers)
     elif verb == HTTPMethod.POST:
-        response = pyrequests.post(url, json=data, params=params, headers=headers)
+        response = pyrequests.post(
+            url, json=data, params=params, headers=headers
+        )
     elif verb == HTTPMethod.DELETE:
         response = pyrequests.delete(url, params=params, headers=headers)
     else:
-        raise ValueError("Invalid HTTP verb. Supported verbs: GET, POST, DELETE")
+        raise ValueError(
+            "Invalid HTTP verb. Supported verbs: GET, POST, DELETE"
+        )
 
     response.raise_for_status()
     return_val: str = (
 
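Pieced together from the branches above, the wrapper dispatches on an `HTTPMethod` enum and forwards `params`/`headers` (plus a JSON body for POST) to `requests`, then raises on non-2xx responses. A hedged reconstruction; the real signature and return handling may differ from what this truncated hunk shows:

    from enum import Enum

    import requests

    class HTTPMethod(Enum):
        GET = "GET"
        POST = "POST"
        DELETE = "DELETE"

    def make_http_request(verb, url, data=None, params=None, headers=None) -> str:
        if verb == HTTPMethod.GET:
            response = requests.get(url, params=params, headers=headers)
        elif verb == HTTPMethod.POST:
            response = requests.post(url, json=data, params=params, headers=headers)
        elif verb == HTTPMethod.DELETE:
            response = requests.delete(url, params=params, headers=headers)
        else:
            raise ValueError("Invalid HTTP verb. Supported verbs: GET, POST, DELETE")
        response.raise_for_status()
        return response.text  # assumed: the hunk builds a str return value
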
@@ -4,15 +4,16 @@ from typing import Any
 from django.conf import settings
 from django.db import connection
 from fsspec import AbstractFileSystem
-from unstract.connectors.filesystems import connectors
-from unstract.connectors.filesystems.unstract_file_system import (
-    UnstractFileSystem,
-)
 from unstract.workflow_execution.execution_file_handler import (
     ExecutionFileHandler,
 )
 from utils.constants import Common
 
+from unstract.connectors.filesystems import connectors
+from unstract.connectors.filesystems.unstract_file_system import (
+    UnstractFileSystem,
+)
+
 
 class BaseConnector(ExecutionFileHandler):
     """Base class for connectors providing common methods and utilities."""
 
@@ -14,8 +14,8 @@ class WorkflowEndpointViewSet(viewsets.ModelViewSet):
     queryset = WorkflowEndpoint.objects.all()
     serializer_class = WorkflowEndpointSerializer
 
-    def get_queryset(self) -> QuerySet:
+    def get_queryset(self) -> QuerySet:
 
         queryset = (
             WorkflowEndpoint.objects.all()
             .select_related("workflow")
 
@@ -37,6 +37,7 @@ class InvalidRequest(APIException):
     status_code = 400
     default_detail = "Invalid Request"
 
+
 class MissingEnvException(APIException):
     status_code = 500
     default_detail = "At least one active platform key should be available."

@@ -73,4 +74,4 @@ class WorkflowExecutionBadRequestException(APIException):
 
 class ToolValidationError(APIException):
     status_code = 400
-    default_detail = "Tool validation error"
\ No newline at end of file
+    default_detail = "Tool validation error"
 
@@ -6,13 +6,14 @@ from rest_framework.request import Request
 from tool_instance.constants import ToolInstanceKey as TIKey
 from tool_instance.exceptions import ToolInstantiationError
 from tool_instance.tool_processor import ToolProcessor
-from unstract.core.llm_workflow_generator.llm_interface import LLMInterface
 from unstract.tool_registry.dto import Tool
 from workflow_manager.workflow.constants import WorkflowKey
 from workflow_manager.workflow.dto import ProvisionalWorkflow
 from workflow_manager.workflow.exceptions import WorkflowGenerationError
 from workflow_manager.workflow.models.workflow import Workflow as WorkflowModel
 
+from unstract.core.llm_workflow_generator.llm_interface import LLMInterface
+
 logger = logging.getLogger(__name__)
 
 
@@ -69,8 +70,8 @@ class WorkflowGenerator:
         self._request = request
 
     def generate_workflow(self, tools: list[Tool]) -> None:
-        """Used to talk to the GPT model through core and obtain a
-        provisional workflow for the user to work with."""
+        """Used to talk to the GPT model through core and obtain a provisional
+        workflow for the user to work with."""
         self._provisional_wf = self._get_provisional_workflow(tools)
 
     @staticmethod
 
@@ -1,8 +1,6 @@
 import logging
 from typing import Any, Optional, Union
 
-from backend.constants import RequestKey
-from backend.serializers import AuditSerializer
 from project.constants import ProjectKey
 from rest_framework.serializers import (
     CharField,
 
@@ -16,11 +14,17 @@ from rest_framework.serializers import (
 from tool_instance.serializers import ToolInstanceSerializer
 from tool_instance.tool_instance_helper import ToolInstanceHelper
 from workflow_manager.endpoint.models import WorkflowEndpoint
-from workflow_manager.workflow.constants import WorkflowExecutionKey, WorkflowKey
+from workflow_manager.workflow.constants import (
+    WorkflowExecutionKey,
+    WorkflowKey,
+)
 from workflow_manager.workflow.exceptions import WorkflowGenerationError
 from workflow_manager.workflow.generator import WorkflowGenerator
 from workflow_manager.workflow.models.workflow import Workflow
 
+from backend.constants import RequestKey
+from backend.serializers import AuditSerializer
+
 logger = logging.getLogger(__name__)
 
 
@@ -215,10 +215,10 @@ class WorkflowHelper:
         workflow_execution: Optional[WorkflowExecution] = None,
         execution_mode: Optional[tuple[str, str]] = None,
     ) -> ExecutionResponse:
-        tool_instances: list[
-            ToolInstance
-        ] = ToolInstanceHelper.get_tool_instances_by_workflow(
-            workflow.id, ToolInstanceKey.STEP
+        tool_instances: list[ToolInstance] = (
+            ToolInstanceHelper.get_tool_instances_by_workflow(
+                workflow.id, ToolInstanceKey.STEP
+            )
         )
 
         WorkflowHelper.validate_tool_instances_meta(
 
@@ -11,13 +11,18 @@ from odf import teletype, text
 from odf.opendocument import load
 
 logging.basicConfig(
-    level=logging.INFO, format="%(asctime)s %(levelname)s %(name)s : %(message)s"
+    level=logging.INFO,
+    format="%(asctime)s %(levelname)s %(name)s : %(message)s",
 )
 
 UPLOAD_FOLDER = os.environ.get("UPLOAD_FOLDER", "/tmp/document_service/upload")
-PROCESS_FOLDER = os.environ.get("PROCESS_FOLDER", "/tmp/document_service/process")
+PROCESS_FOLDER = os.environ.get(
+    "PROCESS_FOLDER", "/tmp/document_service/process"
+)
 LIBREOFFICE_PYTHON = os.environ.get("LIBREOFFICE_PYTHON", "/usr/bin/python3")
-MAX_FILE_SIZE = int(os.environ.get("MAX_FILE_SIZE", 10485760))  # 10 * 1024 * 1024
+MAX_FILE_SIZE = int(
+    os.environ.get("MAX_FILE_SIZE", 10485760)
+)  # 10 * 1024 * 1024
 SERVICE_API_TOKEN = os.environ.get("SERVICE_API_TOKEN", "")
 
 app = Flask("document_service")
 
@@ -99,7 +104,9 @@ def upload_file():
     redis_host = os.environ.get("REDIS_HOST")
     redis_port = int(os.environ.get("REDIS_PORT"))
     redis_password = os.environ.get("REDIS_PASSWORD")
-    r = redis.Redis(host=redis_host, port=redis_port, password=redis_password)
+    r = redis.Redis(
+        host=redis_host, port=redis_port, password=redis_password
+    )
     # TODO: Create a file reaper process to look at uploaded time and delete
     redis_key = f"upload_time:{account_id}_{file_name}"
     current_timestamp = int(time.time())
 
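The `upload_time:{account_id}_{file_name}` key stores exactly what the TODO above asks for: enough for a reaper to delete files whose recorded upload time is too old. Entirely illustrative, since that process does not exist yet; the on-disk layout and retention window are assumptions:

    import os
    import time

    import redis

    MAX_AGE_SECONDS = 24 * 60 * 60  # assumed retention window

    def reap_stale_uploads(r: redis.Redis, upload_folder: str) -> None:
        now = int(time.time())
        for key in r.scan_iter("upload_time:*"):
            uploaded_at = int(r.get(key) or 0)
            if now - uploaded_at <= MAX_AGE_SECONDS:
                continue
            # Key format: upload_time:{account_id}_{file_name}; splitting on
            # the first "_" assumes account IDs contain no underscore.
            _, _, suffix = key.decode().partition(":")
            account_id, _, file_name = suffix.partition("_")
            path = os.path.join(upload_folder, account_id, file_name)  # assumed layout
            if os.path.exists(path):
                os.remove(path)
            r.delete(key)
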
@@ -123,7 +130,9 @@ def find_and_replace():
     output_format = request.args.get("output_format").lower()
     find_and_replace_text = request.json
 
-    app.logger.info(f"Find and replace for file {file_name} for account {account_id}")
+    app.logger.info(
+        f"Find and replace for file {file_name} for account {account_id}"
+    )
     app.logger.info(f"Output format: {output_format}")
 
     if output_format not in ["pdf"]:
 
@@ -143,7 +152,9 @@ def find_and_replace():
     try:
         command = f"{LIBREOFFICE_PYTHON} -m unoserver.converter --convert-to odt \
             --filter writer8 {file_namex} {file_name_odt}"
-        result = subprocess.run(command, shell=True, capture_output=True, text=True)
+        result = subprocess.run(
+            command, shell=True, capture_output=True, text=True
+        )
         app.logger.info(result)
         if result.returncode != 0:
             app.logger.error(
 
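One thing the reformatting does not change: the conversion still goes through `shell=True` with f-string interpolation, which breaks on file names containing spaces or shell metacharacters. The same unoserver invocation is safer as an argument list, sketched below:

    import subprocess

    def convert_to_odt(libreoffice_python: str, src: str, dest: str) -> None:
        # Same CLI as the command string above, but passed as argv: no shell
        # parsing, so awkward paths cannot alter the command.
        subprocess.run(
            [
                libreoffice_python,
                "-m",
                "unoserver.converter",
                "--convert-to",
                "odt",
                "--filter",
                "writer8",
                src,
                dest,
            ],
            capture_output=True,
            text=True,
            check=True,  # raise CalledProcessError on a non-zero exit
        )
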
@@ -155,7 +166,9 @@ def find_and_replace():
         app.logger.info(
             f"File converted to ODT format successfully! {file_name_odt}"
         )
-        app.logger.info(f"ODT convertion result: {result.stdout} | {result.stderr}")
+        app.logger.info(
+            f"ODT convertion result: {result.stdout} | {result.stderr}"
+        )
     except Exception as e:
         app.logger.error(f"Error while converting file to ODT format: {e}")
         return "Error while converting file to ODT format!", 500
 
@@ -169,9 +182,13 @@ def find_and_replace():
         replace_str = find_and_replace_text[find_str]
         for element in doc.getElementsByType(text.Span):
             if find_str in teletype.extractText(element):
-                app.logger.info(f"Found {find_str} in {teletype.extractText(element)}")
+                app.logger.info(
+                    f"Found {find_str} in {teletype.extractText(element)}"
+                )
                 new_element = text.Span()
-                new_element.setAttribute("stylename", element.getAttribute("stylename"))
+                new_element.setAttribute(
+                    "stylename", element.getAttribute("stylename")
+                )
                 t = teletype.extractText(element)
                 t = t.replace(find_str, replace_str)
                 new_element.addText(t)
 
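The replacement logic above builds a new `text.Span`, copies the style name, and fills it with the rewritten text. A condensed sketch of the odfpy flow; the final swap step is assumed to use odfpy's DOM-style `insertBefore`/`removeChild`, since the hunk is cut off before that point:

    from odf import teletype, text
    from odf.opendocument import load

    def replace_in_spans(path: str, find_str: str, replace_str: str) -> None:
        doc = load(path)
        for element in doc.getElementsByType(text.Span):
            content = teletype.extractText(element)
            if find_str not in content:
                continue
            new_element = text.Span()
            new_element.setAttribute("stylename", element.getAttribute("stylename"))
            new_element.addText(content.replace(find_str, replace_str))
            # Assumed swap: odfpy elements expose a DOM-like API.
            element.parentNode.insertBefore(new_element, element)
            element.parentNode.removeChild(element)
        doc.save(path)
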
@@ -188,7 +205,9 @@ def find_and_replace():
             f"{LIBREOFFICE_PYTHON} -m unoserver.converter --convert-to pdf "
             f"--filter writer_pdf_Export {file_name_odt} {file_name_output}"
         )
-        result = subprocess.run(command, shell=True, capture_output=True, text=True)
+        result = subprocess.run(
+            command, shell=True, capture_output=True, text=True
+        )
         if result.returncode != 0:
             app.logger.error(
                 f"Failed to convert file to {output_format} format: "
 
@@ -200,9 +219,13 @@ def find_and_replace():
             f"File converted to {output_format} format successfully! "
             f"{file_name_output}"
         )
-        app.logger.info(f"ODT convertion result: {result.stdout} | {result.stderr}")
+        app.logger.info(
+            f"ODT convertion result: {result.stdout} | {result.stderr}"
+        )
     except Exception as e:
-        app.logger.error(f"Error while converting file to {output_format} format: {e}")
+        app.logger.error(
+            f"Error while converting file to {output_format} format: {e}"
+        )
         return f"Error while converting file to {output_format} format!", 500
     return send_file(file_name_output, as_attachment=True)
 
@@ -1 +1 @@
-<svg id="Layer_1" enable-background="new 0 0 496 496" height="512" viewBox="0 0 496 496" width="512" xmlns="http://www.w3.org/2000/svg"><path d="m480 80v336c0 17.67-14.33 32-32 32h-400c-17.67 0-32-14.33-32-32v-336c0-17.67 14.33-32 32-32h400c17.67 0 32 14.33 32 32z" fill="#b9dcff"/><path d="m448.107 98h-350.107c-17.673 0-32 14.327-32 32v286.107c0 32.544-50 34.259-50-.107v-336c0-17.673 14.327-32 32-32h400c34.366 0 32.65 50 .107 50z" fill="#d1e7ff"/><path d="m480 80v64h-464v-64c0-17.67 14.33-32 32-32h400c17.67 0 32 14.33 32 32z" fill="#a2d0ff"/><path d="m448.107 98h-350.107c-17.673 0-32 14.327-32 32v14h-50v-64c0-17.673 14.327-32 32-32h400c34.366 0 32.65 50 .107 50z" fill="#b9dcff"/><path d="m448 32h-400c-26.467 0-48 21.533-48 48v336c0 26.467 21.533 48 48 48h400c26.467 0 48-21.533 48-48v-336c0-26.467-21.533-48-48-48zm16 384c0 8.823-7.178 16-16 16h-400c-8.822 0-16-7.177-16-16v-256h400c8.836 0 16-7.164 16-16s-7.164-16-16-16h-400v-48c0-8.823 7.178-16 16-16h400c8.822 0 16 7.177 16 16zm-318-171.845v89.689c0 8.837-7.164 16-16 16s-16-7.163-16-16v-28.844h-14v28.845c0 8.837-7.164 16-16 16s-16-7.163-16-16v-89.689c0-8.837 7.164-16 16-16s16 7.163 16 16v28.844h14v-28.845c0-8.837 7.164-16 16-16s16 7.164 16 16zm95-.155c0 8.836-7.164 16-16 16h-8v74c0 8.837-7.164 16-16 16s-16-7.163-16-16v-74h-8c-8.836 0-16-7.164-16-16s7.164-16 16-16h48c8.836 0 16 7.164 16 16zm142.513-15.845h-18.203c-8.836 0-16 7.163-16 16v89.689c0 8.837 7.164 16 16 16s16-7.163 16-16v-15.654c27.818 0 46.879-20.017 46.879-45.358 0-24.635-20.041-44.677-44.676-44.677zm0 58.035h-2.203v-26.034h2.203c6.99 0 12.676 5.687 12.676 12.677 0 8.435-6.231 13.357-12.676 13.357zm-48.513-42.19c0 8.836-7.164 16-16 16h-8v74c0 8.837-7.164 16-16 16s-16-7.163-16-16v-74h-8c-8.836 0-16-7.164-16-16s7.164-16 16-16h48c8.836 0 16 7.164 16 16zm-267.87-132.31c-8.308 1.599-16.836-3.352-18.82-12.56-1.613-8.38 3.412-16.844 12.56-18.82 8.457-1.624 16.868 3.496 18.82 12.56 1.623 8.433-3.464 16.855-12.56 18.82zm48 0c-8.394 1.616-16.853-3.429-18.82-12.56-1.613-8.379 3.411-16.844 12.56-18.82 8.457-1.624 16.868 3.496 18.82 12.56 1.623 8.431-3.463 16.855-12.56 18.82zm48 0c-8.399 1.617-16.854-3.433-18.82-12.56-1.613-8.378 3.41-16.844 12.56-18.82 8.446-1.622 16.865 3.484 18.82 12.56 1.623 8.431-3.463 16.855-12.56 18.82z" fill="#2e58ff"/></svg>
\ No newline at end of file
+<svg id="Layer_1" enable-background="new 0 0 496 496" height="512" viewBox="0 0 496 496" width="512" xmlns="http://www.w3.org/2000/svg"><path d="m480 80v336c0 17.67-14.33 32-32 32h-400c-17.67 0-32-14.33-32-32v-336c0-17.67 14.33-32 32-32h400c17.67 0 32 14.33 32 32z" fill="#b9dcff"/><path d="m448.107 98h-350.107c-17.673 0-32 14.327-32 32v286.107c0 32.544-50 34.259-50-.107v-336c0-17.673 14.327-32 32-32h400c34.366 0 32.65 50 .107 50z" fill="#d1e7ff"/><path d="m480 80v64h-464v-64c0-17.67 14.33-32 32-32h400c17.67 0 32 14.33 32 32z" fill="#a2d0ff"/><path d="m448.107 98h-350.107c-17.673 0-32 14.327-32 32v14h-50v-64c0-17.673 14.327-32 32-32h400c34.366 0 32.65 50 .107 50z" fill="#b9dcff"/><path d="m448 32h-400c-26.467 0-48 21.533-48 48v336c0 26.467 21.533 48 48 48h400c26.467 0 48-21.533 48-48v-336c0-26.467-21.533-48-48-48zm16 384c0 8.823-7.178 16-16 16h-400c-8.822 0-16-7.177-16-16v-256h400c8.836 0 16-7.164 16-16s-7.164-16-16-16h-400v-48c0-8.823 7.178-16 16-16h400c8.822 0 16 7.177 16 16zm-318-171.845v89.689c0 8.837-7.164 16-16 16s-16-7.163-16-16v-28.844h-14v28.845c0 8.837-7.164 16-16 16s-16-7.163-16-16v-89.689c0-8.837 7.164-16 16-16s16 7.163 16 16v28.844h14v-28.845c0-8.837 7.164-16 16-16s16 7.164 16 16zm95-.155c0 8.836-7.164 16-16 16h-8v74c0 8.837-7.164 16-16 16s-16-7.163-16-16v-74h-8c-8.836 0-16-7.164-16-16s7.164-16 16-16h48c8.836 0 16 7.164 16 16zm142.513-15.845h-18.203c-8.836 0-16 7.163-16 16v89.689c0 8.837 7.164 16 16 16s16-7.163 16-16v-15.654c27.818 0 46.879-20.017 46.879-45.358 0-24.635-20.041-44.677-44.676-44.677zm0 58.035h-2.203v-26.034h2.203c6.99 0 12.676 5.687 12.676 12.677 0 8.435-6.231 13.357-12.676 13.357zm-48.513-42.19c0 8.836-7.164 16-16 16h-8v74c0 8.837-7.164 16-16 16s-16-7.163-16-16v-74h-8c-8.836 0-16-7.164-16-16s7.164-16 16-16h48c8.836 0 16 7.164 16 16zm-267.87-132.31c-8.308 1.599-16.836-3.352-18.82-12.56-1.613-8.38 3.412-16.844 12.56-18.82 8.457-1.624 16.868 3.496 18.82 12.56 1.623 8.433-3.464 16.855-12.56 18.82zm48 0c-8.394 1.616-16.853-3.429-18.82-12.56-1.613-8.379 3.411-16.844 12.56-18.82 8.457-1.624 16.868 3.496 18.82 12.56 1.623 8.431-3.463 16.855-12.56 18.82zm48 0c-8.399 1.617-16.854-3.433-18.82-12.56-1.613-8.378 3.41-16.844 12.56-18.82 8.446-1.622 16.865 3.484 18.82 12.56 1.623 8.431-3.463 16.855-12.56 18.82z" fill="#2e58ff"/></svg>

Icon file size: 2.2 KiB before, 2.2 KiB after.

@@ -1 +1 @@
-<svg xmlns="http://www.w3.org/2000/svg" width="64" height="64"><path d="M14.48 58.196L.558 34.082c-.744-1.288-.744-2.876 0-4.164L14.48 5.805c.743-1.287 2.115-2.08 3.6-2.082h27.857c1.48.007 2.845.8 3.585 2.082l13.92 24.113c.744 1.288.744 2.876 0 4.164L49.52 58.196c-.743 1.287-2.115 2.08-3.6 2.082H18.07c-1.483-.005-2.85-.798-3.593-2.082z" fill="#4386fa"/><path d="M40.697 24.235s3.87 9.283-1.406 14.545-14.883 1.894-14.883 1.894L43.95 60.27h1.984c1.486-.002 2.858-.796 3.6-2.082L58.75 42.23z" opacity=".1"/><path d="M45.267 43.23L41 38.953a.67.67 0 0 0-.158-.12 11.63 11.63 0 1 0-2.032 2.037.67.67 0 0 0 .113.15l4.277 4.277a.67.67 0 0 0 .947 0l1.12-1.12a.67.67 0 0 0 0-.947zM31.64 40.464a8.75 8.75 0 1 1 8.749-8.749 8.75 8.75 0 0 1-8.749 8.749zm-5.593-9.216v3.616c.557.983 1.363 1.803 2.338 2.375v-6.013zm4.375-2.998v9.772a6.45 6.45 0 0 0 2.338 0V28.25zm6.764 6.606v-2.142H34.85v4.5a6.43 6.43 0 0 0 2.338-2.368z" fill="#fff"/></svg>
\ No newline at end of file
+<svg xmlns="http://www.w3.org/2000/svg" width="64" height="64"><path d="M14.48 58.196L.558 34.082c-.744-1.288-.744-2.876 0-4.164L14.48 5.805c.743-1.287 2.115-2.08 3.6-2.082h27.857c1.48.007 2.845.8 3.585 2.082l13.92 24.113c.744 1.288.744 2.876 0 4.164L49.52 58.196c-.743 1.287-2.115 2.08-3.6 2.082H18.07c-1.483-.005-2.85-.798-3.593-2.082z" fill="#4386fa"/><path d="M40.697 24.235s3.87 9.283-1.406 14.545-14.883 1.894-14.883 1.894L43.95 60.27h1.984c1.486-.002 2.858-.796 3.6-2.082L58.75 42.23z" opacity=".1"/><path d="M45.267 43.23L41 38.953a.67.67 0 0 0-.158-.12 11.63 11.63 0 1 0-2.032 2.037.67.67 0 0 0 .113.15l4.277 4.277a.67.67 0 0 0 .947 0l1.12-1.12a.67.67 0 0 0 0-.947zM31.64 40.464a8.75 8.75 0 1 1 8.749-8.749 8.75 8.75 0 0 1-8.749 8.749zm-5.593-9.216v3.616c.557.983 1.363 1.803 2.338 2.375v-6.013zm4.375-2.998v9.772a6.45 6.45 0 0 0 2.338 0V28.25zm6.764 6.606v-2.142H34.85v4.5a6.43 6.43 0 0 0 2.338-2.368z" fill="#fff"/></svg>

Icon file size: 932 B before, 933 B after.

@@ -1 +1 @@
-<svg xmlns="http://www.w3.org/2000/svg" height="24" viewBox="0 -960 960 960" width="24"><path d="M753.846-361.923q-46.769 0-78.577-32-31.807-32-31.807-78.769 0-46.769 31.807-78.769 31.808-31.999 78.577-31.999 46.384 0 78.384 31.999 32 32 32 78.769 0 46.769-32 78.769-32 32-78.384 32ZM366.539-285.77l-42.153-42.383 113.385-114.54H95.386v-59.998h342.77l-113.77-114.77 42.153-42.538 186.537 187.307L366.539-285.77Z"/></svg>
\ No newline at end of file
+<svg xmlns="http://www.w3.org/2000/svg" height="24" viewBox="0 -960 960 960" width="24"><path d="M753.846-361.923q-46.769 0-78.577-32-31.807-32-31.807-78.769 0-46.769 31.807-78.769 31.808-31.999 78.577-31.999 46.384 0 78.384 31.999 32 32 32 78.769 0 46.769-32 78.769-32 32-78.384 32ZM366.539-285.77l-42.153-42.383 113.385-114.54H95.386v-59.998h342.77l-113.77-114.77 42.153-42.538 186.537 187.307L366.539-285.77Z"/></svg>

Icon file size: 420 B before, 421 B after.

@@ -18,4 +18,4 @@
 .step-icon {
   opacity: 0.60;
   image-rendering: pixelated;
-}
\ No newline at end of file
+}
 
@@ -27,4 +27,4 @@
 }
 .tool-dragging {
   background-color: #DAE3EC;
-}
\ No newline at end of file
+}
 
@@ -62,4 +62,4 @@
 
 .ds-set-card-select {
   width: 120px;
-}
\ No newline at end of file
+}
 
@@ -4,4 +4,4 @@
   border-radius: 5px;
   border: 1px solid #FFD2DB;
   padding: 5px;
-}
\ No newline at end of file
+}
 
Some files were not shown because too many files have changed in this diff.