Merge remote-tracking branch 'origin/main' into fix/brrop
* origin/main: (93 commits)
  chore: node help link (langgenius#4795)
  Add SearchApi tools (langgenius#4648)
  chore: improve node user experience (langgenius#4792)
  fix: in tool and http node of iteration can not show item var correctly (langgenius#4791)
  feat: Add logging warning when MAIL_TYPE is not set (langgenius#4771)
  fix organize agent's history messages without recalculating tokens (langgenius#4324)
  feat: support define tags in tool yaml (langgenius#4763)
  feat: update ernie model (langgenius#4756)
  fix: incorrect workflow max call depth (langgenius#4759)
  feat: support baichuan3 turbo, baichuan3 turbo 128k, and baichuan4 (langgenius#4762)
  fix: confusing chart description (langgenius#4760)
  style: fix annotation panel display misalignment (langgenius#4750)
  Add WORKFLOW_CALL_MAX_DEPTH env var. (langgenius#4713)
  Fix/4742 ollama num gpu option not consistent with allowed values (langgenius#4751)
  style: the 'all' of add tool panel should contain workflow tools (langgenius#4755)
  fix: Corrected schema link in model_runtime's README.md (langgenius#4757)
  fix: optimize sticky header styles z-index in tools - ProviderList component (langgenius#4746)
  fix: workflow run sequence number slow sql (langgenius#4737)
  Show tool i18n name on chat pannel (langgenius#4724)
  fix: Correct context size for banchuan2-53b and banchuan2-turbo (langgenius#4721)
  ...
wangweivic committed May 30, 2024
2 parents 09ac0b7 + 5b2cd8d commit 55d0b69
Showing 628 changed files with 21,765 additions and 4,242 deletions.
12 changes: 12 additions & 0 deletions api/.env.example
@@ -17,6 +17,9 @@ APP_WEB_URL=http://127.0.0.1:3000
# Files URL
FILES_URL=http://127.0.0.1:5001

# The time in seconds after which the signature is rejected
FILES_ACCESS_TIMEOUT=300

# celery configuration
CELERY_BROKER_URL=redis://:difyai123456@localhost:6379/1
CELERY_GLOBAL_PREFIX={celery_global_prefix}
@@ -145,6 +148,7 @@ NOTION_INTERNAL_SECRET=you-internal-secret

ETL_TYPE=dify
UNSTRUCTURED_API_URL=
UNSTRUCTURED_API_KEY=

SSRF_PROXY_HTTP_URL=
SSRF_PROXY_HTTPS_URL=
@@ -176,3 +180,11 @@ HTTP_REQUEST_NODE_MAX_TEXT_SIZE=1048576 # 1MB

# Log file path
LOG_FILE=

# Indexing configuration
INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH=1000

# Workflow runtime configuration
WORKFLOW_MAX_EXECUTION_STEPS=50
WORKFLOW_MAX_EXECUTION_TIME=600
WORKFLOW_CALL_MAX_DEPTH=5
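
The three WORKFLOW_* variables bound a single workflow run: the maximum number of node executions, the wall-clock time, and the nested workflow-call depth. A minimal, hedged sketch of how such limits might be enforced — run_workflow and its callable nodes are illustrative assumptions, not Dify's implementation:

import os
import time

MAX_STEPS = int(os.environ.get('WORKFLOW_MAX_EXECUTION_STEPS', '50'))
MAX_SECONDS = int(os.environ.get('WORKFLOW_MAX_EXECUTION_TIME', '600'))
MAX_DEPTH = int(os.environ.get('WORKFLOW_CALL_MAX_DEPTH', '5'))

def run_workflow(nodes, call_depth=0):
    """Execute a list of callables, aborting when any configured limit is hit."""
    if call_depth > MAX_DEPTH:
        raise RuntimeError(f'workflow call depth exceeds {MAX_DEPTH}')
    started_at = time.time()
    for step, node in enumerate(nodes, start=1):
        if step > MAX_STEPS:
            raise RuntimeError(f'workflow exceeds {MAX_STEPS} execution steps')
        if time.time() - started_at > MAX_SECONDS:
            raise RuntimeError(f'workflow exceeds {MAX_SECONDS} seconds')
        node()  # execute the node
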
55 changes: 55 additions & 0 deletions api/commands.py
@@ -448,9 +448,64 @@ def convert_to_agent_apps():
click.echo(click.style('Congratulations! Converted {} agent apps.'.format(len(proceeded_app_ids)), fg='green'))


@click.command('add-qdrant-doc-id-index', help='add qdrant doc_id index.')
@click.option('--field', default='metadata.doc_id', prompt=False, help='index field, default is metadata.doc_id.')
def add_qdrant_doc_id_index(field: str):
click.echo(click.style('Starting to add qdrant doc_id index.', fg='green'))
config = current_app.config
vector_type = config.get('VECTOR_STORE')
if vector_type != "qdrant":
click.echo(click.style('Sorry, only the qdrant vector store is supported.', fg='red'))
return
create_count = 0

try:
bindings = db.session.query(DatasetCollectionBinding).all()
if not bindings:
click.echo(click.style('Sorry, no dataset collection bindings found.', fg='red'))
return
import qdrant_client
from qdrant_client.http.exceptions import UnexpectedResponse
from qdrant_client.http.models import PayloadSchemaType

from core.rag.datasource.vdb.qdrant.qdrant_vector import QdrantConfig
for binding in bindings:
qdrant_config = QdrantConfig(
endpoint=config.get('QDRANT_URL'),
api_key=config.get('QDRANT_API_KEY'),
root_path=current_app.root_path,
timeout=config.get('QDRANT_CLIENT_TIMEOUT'),
grpc_port=config.get('QDRANT_GRPC_PORT'),
prefer_grpc=config.get('QDRANT_GRPC_ENABLED')
)
try:
client = qdrant_client.QdrantClient(**qdrant_config.to_qdrant_params())
# create payload index
client.create_payload_index(binding.collection_name, field,
field_schema=PayloadSchemaType.KEYWORD)
create_count += 1
except UnexpectedResponse as e:
# Collection does not exist, so skip it
if e.status_code == 404:
click.echo(click.style(f'Collection not found, collection_name:{binding.collection_name}.', fg='red'))
continue
# Some other error occurred, so log it and continue
else:
click.echo(click.style(f'Failed to create qdrant index, collection_name:{binding.collection_name}.', fg='red'))

except Exception as e:
click.echo(click.style(f'Failed to create qdrant client: {e}', fg='red'))

click.echo(
click.style(f'Congratulations! Created {create_count} collection indexes.',
fg='green'))


def register_commands(app):
app.cli.add_command(reset_password)
app.cli.add_command(reset_email)
app.cli.add_command(reset_encrypt_key_pair)
app.cli.add_command(vdb_migrate)
app.cli.add_command(convert_to_agent_apps)
app.cli.add_command(add_qdrant_doc_id_index)
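
With the command registered above, it can be invoked through the Flask CLI from the api directory, e.g. flask add-qdrant-doc-id-index --field metadata.doc_id. Per collection, the index creation reduces to a single qdrant-client call; a standalone sketch under assumed values — the endpoint and collection name are placeholders, not Dify configuration:

import qdrant_client
from qdrant_client.http.models import PayloadSchemaType

# Placeholder endpoint and collection name -- adjust to your deployment.
client = qdrant_client.QdrantClient(url='http://localhost:6333')
client.create_payload_index(
    collection_name='example_collection',
    field_name='metadata.doc_id',
    field_schema=PayloadSchemaType.KEYWORD,
)
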

38 changes: 29 additions & 9 deletions api/config.py
Expand Up @@ -23,6 +23,7 @@
'SERVICE_API_URL': 'https://api.dify.ai',
'APP_WEB_URL': 'https://udify.app',
'FILES_URL': '',
'FILES_ACCESS_TIMEOUT': 300,
'S3_ADDRESS_STYLE': 'auto',
'STORAGE_TYPE': 'local',
'STORAGE_LOCAL_PATH': 'storage',
@@ -79,6 +80,10 @@
'KEYWORD_DATA_SOURCE_TYPE': 'database',
'INNER_API': 'False',
'ENTERPRISE_ENABLED': 'False',
'INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH': 1000,
'WORKFLOW_MAX_EXECUTION_STEPS': 50,
'WORKFLOW_MAX_EXECUTION_TIME': 600,
'WORKFLOW_CALL_MAX_DEPTH': 5,
}


@@ -109,7 +114,7 @@ def __init__(self):
# ------------------------
# General Configurations.
# ------------------------
self.CURRENT_VERSION = "0.6.8"
self.CURRENT_VERSION = "0.6.9"
self.COMMIT_SHA = get_env('COMMIT_SHA')
self.EDITION = get_env('EDITION')
self.DEPLOY_ENV = get_env('DEPLOY_ENV')
@@ -140,6 +145,10 @@ def __init__(self):
# URL is signed and has an expiration time.
self.FILES_URL = get_env('FILES_URL') if get_env('FILES_URL') else self.CONSOLE_API_URL

# File access timeout specifies how long, in seconds, a file remains accessible after its URL is signed.
# The default value is 300 seconds.
self.FILES_ACCESS_TIMEOUT = int(get_env('FILES_ACCESS_TIMEOUT'))

# Your App secret key will be used for securely signing the session cookie
# Make sure you are changing this key for your deployment with a strong key.
# You can generate a strong key using `openssl rand -base64 42`.
@@ -176,7 +185,8 @@ def __init__(self):
'pool_size': int(get_env('SQLALCHEMY_POOL_SIZE')),
'max_overflow': int(get_env('SQLALCHEMY_MAX_OVERFLOW')),
'pool_recycle': int(get_env('SQLALCHEMY_POOL_RECYCLE')),
'pool_pre_ping': get_bool_env('SQLALCHEMY_POOL_PRE_PING')
'pool_pre_ping': get_bool_env('SQLALCHEMY_POOL_PRE_PING'),
'connect_args': {'options': '-c timezone=UTC'},
}

self.SQLALCHEMY_ECHO = get_bool_env('SQLALCHEMY_ECHO')
@@ -216,12 +226,12 @@ def __init__(self):
self.AZURE_BLOB_ACCOUNT_KEY = get_env('AZURE_BLOB_ACCOUNT_KEY')
self.AZURE_BLOB_CONTAINER_NAME = get_env('AZURE_BLOB_CONTAINER_NAME')
self.AZURE_BLOB_ACCOUNT_URL = get_env('AZURE_BLOB_ACCOUNT_URL')
self.ALIYUN_OSS_BUCKET_NAME=get_env('ALIYUN_OSS_BUCKET_NAME')
self.ALIYUN_OSS_ACCESS_KEY=get_env('ALIYUN_OSS_ACCESS_KEY')
self.ALIYUN_OSS_SECRET_KEY=get_env('ALIYUN_OSS_SECRET_KEY')
self.ALIYUN_OSS_ENDPOINT=get_env('ALIYUN_OSS_ENDPOINT')
self.ALIYUN_OSS_REGION=get_env('ALIYUN_OSS_REGION')
self.ALIYUN_OSS_AUTH_VERSION=get_env('ALIYUN_OSS_AUTH_VERSION')
self.ALIYUN_OSS_BUCKET_NAME = get_env('ALIYUN_OSS_BUCKET_NAME')
self.ALIYUN_OSS_ACCESS_KEY = get_env('ALIYUN_OSS_ACCESS_KEY')
self.ALIYUN_OSS_SECRET_KEY = get_env('ALIYUN_OSS_SECRET_KEY')
self.ALIYUN_OSS_ENDPOINT = get_env('ALIYUN_OSS_ENDPOINT')
self.ALIYUN_OSS_REGION = get_env('ALIYUN_OSS_REGION')
self.ALIYUN_OSS_AUTH_VERSION = get_env('ALIYUN_OSS_AUTH_VERSION')
self.GOOGLE_STORAGE_BUCKET_NAME = get_env('GOOGLE_STORAGE_BUCKET_NAME')
self.GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64 = get_env('GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64')

@@ -286,7 +296,7 @@ def __init__(self):
self.SMTP_USERNAME = get_env('SMTP_USERNAME')
self.SMTP_PASSWORD = get_env('SMTP_PASSWORD')
self.SMTP_USE_TLS = get_bool_env('SMTP_USE_TLS')

# ------------------------
# Workspace Configurations.
# ------------------------
@@ -314,6 +324,10 @@ def __init__(self):
self.UPLOAD_FILE_BATCH_LIMIT = int(get_env('UPLOAD_FILE_BATCH_LIMIT'))
self.UPLOAD_IMAGE_FILE_SIZE_LIMIT = int(get_env('UPLOAD_IMAGE_FILE_SIZE_LIMIT'))

self.WORKFLOW_MAX_EXECUTION_STEPS = int(get_env('WORKFLOW_MAX_EXECUTION_STEPS'))
self.WORKFLOW_MAX_EXECUTION_TIME = int(get_env('WORKFLOW_MAX_EXECUTION_TIME'))
self.WORKFLOW_CALL_MAX_DEPTH = int(get_env('WORKFLOW_CALL_MAX_DEPTH'))

# Moderation in app Configurations.
self.OUTPUT_MODERATION_BUFFER_SIZE = int(get_env('OUTPUT_MODERATION_BUFFER_SIZE'))

@@ -366,6 +380,7 @@ def __init__(self):

self.ETL_TYPE = get_env('ETL_TYPE')
self.UNSTRUCTURED_API_URL = get_env('UNSTRUCTURED_API_URL')
self.UNSTRUCTURED_API_KEY = get_env('UNSTRUCTURED_API_KEY')
self.BILLING_ENABLED = get_bool_env('BILLING_ENABLED')
self.CAN_REPLACE_LOGO = get_bool_env('CAN_REPLACE_LOGO')

@@ -379,3 +394,8 @@

self.KEYWORD_DATA_SOURCE_TYPE = get_env('KEYWORD_DATA_SOURCE_TYPE')
self.ENTERPRISE_ENABLED = get_bool_env('ENTERPRISE_ENABLED')

# ------------------------
# Indexing Configurations.
# ------------------------
self.INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH = int(get_env('INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH'))
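
The new FILES_ACCESS_TIMEOUT setting caps how long a signed file URL stays valid. A hedged sketch of a possible signature check against that window — the sign/verify helpers and the placeholder secret are illustrative assumptions, not Dify's file service:

import hashlib
import hmac
import os
import time

SECRET_KEY = b'change-me'  # placeholder signing key, not a real Dify secret
ACCESS_TIMEOUT = int(os.environ.get('FILES_ACCESS_TIMEOUT', '300'))

def sign(file_id: str, timestamp: int) -> str:
    # HMAC over the file id and the issue timestamp
    msg = f'{file_id}:{timestamp}'.encode()
    return hmac.new(SECRET_KEY, msg, hashlib.sha256).hexdigest()

def verify(file_id: str, timestamp: int, signature: str) -> bool:
    # Reject the signature once the configured access window has elapsed
    if time.time() - timestamp > ACCESS_TIMEOUT:
        return False
    return hmac.compare_digest(sign(file_id, timestamp), signature)
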
