I have a simple Python app built with Streamlit and LangChain, and I am deploying it to Azure via CI/CD with the following YAML definition:
stages:
- stage: Build
  displayName: Build stage
  jobs:
  - job: BuildJob
    pool:
      vmImage: $(vmImageName)
    steps:
    - task: UsePythonVersion@0
      inputs:
        versionSpec: '$(pythonVersion)'
      displayName: 'Use Python $(pythonVersion)'
    - script: |
        python -m venv antenv
        source antenv/bin/activate
        python -m pip install --upgrade pip
        pip install setup streamlit
        pip install --target="./.python_packages/lib/site-packages" -r ./requirements.txt
      workingDirectory: $(projectRoot)
      displayName: "Install requirements"
    - task: ArchiveFiles@2
      displayName: 'Archive files'
      inputs:
        rootFolderOrFile: '$(projectRoot)'
        includeRootFolder: false
        archiveType: zip
        archiveFile: $(Build.ArtifactStagingDirectory)/$(Build.BuildId).zip
        replaceExistingArchive: true
    - upload: $(Build.ArtifactStagingDirectory)/$(Build.BuildId).zip
      displayName: 'Upload package'
      artifact: drop

- stage: Deploy
  displayName: 'Deploy Web App'
  dependsOn: Build
  condition: succeeded()
  jobs:
  - deployment: DeploymentJob
    pool:
      vmImage: $(vmImageName)
    environment: $(environmentName)
    strategy:
      runOnce:
        deploy:
          steps:
          - task: UsePythonVersion@0
            inputs:
              versionSpec: '$(pythonVersion)'
            displayName: 'Use Python version'
          - task: AzureAppServiceSettings@1
            displayName: 'Set App Settings'
            inputs:
              azureSubscription: 'AzureAIPocPrincipal'
              appName: 'test'
              resourceGroupName: 'AzureAIPoc'
              appSettings: |
                [
                  {
                    "name": "ENABLE_ORYX_BUILD",
                    "value": 1
                  },
                  {
                    "name": "SCM_DO_BUILD_DURING_DEPLOYMENT",
                    "value": 1
                  },
                  {
                    "name": "POST_BUILD_COMMAND",
                    "value": "pip install -r ./requirements.txt"
                  }
                ]
          - task: AzureWebApp@1
            displayName: 'Deploy Azure Web App : {{ webAppName }}'
            inputs:
              azureSubscription: 'AzureAIPocPrincipal'
              appType: 'webAppLinux'
              deployToSlotOrASE: true
              resourceGroupName: 'AzureAIPoc'
              slotName: 'production'
              appName: 'test'
              package: '$(Pipeline.Workspace)/drop/$(Build.BuildId).zip'
              startUpCommand: 'python -m streamlit run app/home.py --server.port 8000 --server.address 0.0.0.0'
My requirements file is:
langchain==0.0.225
streamlit
openai
python-dotenv
pinecone-client
streamlit-chat
chromadb
tiktoken
pymssql
typing-inspect==0.8.0
typing_extensions==4.5.0
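For reference, the deployed environment runs Python 3.11 (per the paths in the traceback below). A small diagnostic script like the sketch below, run both locally and on the App Service instance (for example from the SSH console), would show whether the two environments resolve to the same package versions; the list is simply taken from requirements.txt plus pydantic, which langchain pulls in transitively:

# Diagnostic sketch: print the interpreter and the package versions that matter
# for the import error below, so the local and deployed environments can be compared.
import sys
import importlib.metadata as metadata

print("python", sys.version)
for dist in ("langchain", "pydantic", "typing_extensions", "typing-inspect", "streamlit"):
    try:
        print(dist, metadata.version(dist))
    except metadata.PackageNotFoundError:
        print(dist, "not installed")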
However, I am getting the following error:
TypeError: issubclass() arg 1 must be a class
Traceback:
File "/tmp/8db82251b0e58bc/antenv/lib/python3.11/site-packages/streamlit/runtime/scriptrunner/script_runner.py", line 552, in _run_script
exec(code, module.__dict__)
File "/tmp/8db82251b0e58bc/app/pages/xxv0.2.py", line 6, in <module>
import langchain
File "/tmp/8db82251b0e58bc/antenv/lib/python3.11/site-packages/langchain/__init__.py", line 6, in <module>
from langchain.agents import MRKLChain, ReActChain, SelfAskWithSearchChain
File "/tmp/8db82251b0e58bc/antenv/lib/python3.11/site-packages/langchain/agents/__init__.py", line 2, in <module>
from langchain.agents.agent import (
File "/tmp/8db82251b0e58bc/antenv/lib/python3.11/site-packages/langchain/agents/agent.py", line 26, in <module>
from langchain.chains.base import Chain
File "/tmp/8db82251b0e58bc/antenv/lib/python3.11/site-packages/langchain/chains/__init__.py", line 2, in <module>
from langchain.chains.api.base import APIChain
File "/tmp/8db82251b0e58bc/antenv/lib/python3.11/site-packages/langchain/chains/api/base.py", line 13, in <module>
from langchain.chains.api.prompt import API_RESPONSE_PROMPT, API_URL_PROMPT
File "/tmp/8db82251b0e58bc/antenv/lib/python3.11/site-packages/langchain/chains/api/prompt.py", line 2, in <module>
from langchain.prompts.prompt import PromptTemplate
File "/tmp/8db82251b0e58bc/antenv/lib/python3.11/site-packages/langchain/prompts/__init__.py", line 12, in <module>
from langchain.prompts.example_selector import (
File "/tmp/8db82251b0e58bc/antenv/lib/python3.11/site-packages/langchain/prompts/example_selector/__init__.py", line 4, in <module>
from langchain.prompts.example_selector.semantic_similarity import (
File "/tmp/8db82251b0e58bc/antenv/lib/python3.11/site-packages/langchain/prompts/example_selector/semantic_similarity.py", line 8, in <module>
from langchain.embeddings.base import Embeddings
File "/tmp/8db82251b0e58bc/antenv/lib/python3.11/site-packages/langchain/embeddings/__init__.py", line 29, in <module>
from langchain.embeddings.sagemaker_endpoint import SagemakerEndpointEmbeddings
File "/tmp/8db82251b0e58bc/antenv/lib/python3.11/site-packages/langchain/embeddings/sagemaker_endpoint.py", line 7, in <module>
from langchain.llms.sagemaker_endpoint import ContentHandlerBase
File "/tmp/8db82251b0e58bc/antenv/lib/python3.11/site-packages/langchain/llms/__init__.py", line 52, in <module>
from langchain.llms.vertexai import VertexAI
File "/tmp/8db82251b0e58bc/antenv/lib/python3.11/site-packages/langchain/llms/vertexai.py", line 14, in <module>
from langchain.utilities.vertexai import (
File "/tmp/8db82251b0e58bc/antenv/lib/python3.11/site-packages/langchain/utilities/__init__.py", line 3, in <module>
from langchain.utilities.apify import ApifyWrapper
File "/tmp/8db82251b0e58bc/antenv/lib/python3.11/site-packages/langchain/utilities/apify.py", line 5, in <module>
from langchain.document_loaders import ApifyDatasetLoader
File "/tmp/8db82251b0e58bc/antenv/lib/python3.11/site-packages/langchain/document_loaders/__init__.py", line 43, in <module>
from langchain.document_loaders.embaas import EmbaasBlobLoader, EmbaasLoader
File "/tmp/8db82251b0e58bc/antenv/lib/python3.11/site-packages/langchain/document_loaders/embaas.py", line 54, in <module>
class BaseEmbaasLoader(BaseModel):
File "/tmp/8db82251b0e58bc/antenv/lib/python3.11/site-packages/pydantic/main.py", line 204, in __new__
fields[ann_name] = ModelField.infer(
^^^^^^^^^^^^^^^^^
File "/tmp/8db82251b0e58bc/antenv/lib/python3.11/site-packages/pydantic/fields.py", line 488, in infer
return cls(
^^^^
File "/tmp/8db82251b0e58bc/antenv/lib/python3.11/site-packages/pydantic/fields.py", line 419, in __init__
self.prepare()
File "/tmp/8db82251b0e58bc/antenv/lib/python3.11/site-packages/pydantic/fields.py", line 539, in prepare
self.populate_validators()
File "/tmp/8db82251b0e58bc/antenv/lib/python3.11/site-packages/pydantic/fields.py", line 801, in populate_validators
*(get_validators() if get_validators else list(find_validators(self.type_, self.model_config))),
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/tmp/8db82251b0e58bc/antenv/lib/python3.11/site-packages/pydantic/validators.py", line 696, in find_validators
yield make_typeddict_validator(type_, config)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/tmp/8db82251b0e58bc/antenv/lib/python3.11/site-packages/pydantic/validators.py", line 585, in make_typeddict_validator
TypedDictModel = create_model_from_typeddict(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/tmp/8db82251b0e58bc/antenv/lib/python3.11/site-packages/pydantic/annotated_types.py", line 35, in create_model_from_typeddict
return create_model(typeddict_cls.__name__, **kwargs, **field_definitions)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/tmp/8db82251b0e58bc/antenv/lib/python3.11/site-packages/pydantic/main.py", line 972, in create_model
return type(__model_name, __base__, namespace)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/tmp/8db82251b0e58bc/antenv/lib/python3.11/site-packages/pydantic/main.py", line 204, in __new__
fields[ann_name] = ModelField.infer(
^^^^^^^^^^^^^^^^^
File "/tmp/8db82251b0e58bc/antenv/lib/python3.11/site-packages/pydantic/fields.py", line 488, in infer
return cls(
^^^^
File "/tmp/8db82251b0e58bc/antenv/lib/python3.11/site-packages/pydantic/fields.py", line 419, in __init__
self.prepare()
File "/tmp/8db82251b0e58bc/antenv/lib/python3.11/site-packages/pydantic/fields.py", line 534, in prepare
self._type_analysis()
File "/tmp/8db82251b0e58bc/antenv/lib/python3.11/site-packages/pydantic/fields.py", line 638, in _type_analysis
elif issubclass(origin, Tuple): # type: ignore
^^^^^^^^^^^^^^^^^^^^^^^^^
File "/opt/python/3.11.3/lib/python3.11/typing.py", line 1570, in __subclasscheck__
return issubclass(cls, self.__origin__)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
I am not including the app script here because the code works locally; I think it is something related to the Azure App Service plan environment or the venv setup in the YAML file.
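Since the traceback above fails inside "import langchain", before any of the page logic runs, the problem can presumably be reproduced without Streamlit at all. A minimal check along these lines, executed with the deployed interpreter, should raise the same TypeError if the issue really is in the App Service environment rather than in the app itself:

# Minimal reproduction sketch, independent of Streamlit and of the app code:
# the traceback fails while importing langchain, so the bare import should be
# enough to trigger the same TypeError in a broken environment.
import langchain

print("langchain imported OK")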