@@ -4,7 +4,13 @@ param location string = resourceGroup().location
4
4
@description('Tags that will be applied to all resources')
param tags object = {}

@description('The configuration for the LlamaIndex application')
param llamaIndexConfig object = {}

@description('True when the deployment runs from a CI/CD pipeline (service principal) rather than a local user')
param isContinuousIntegration bool

param llamaIndexJavascriptExists bool

// Role assignments below need the right principal type for the deploying identity:
// a service principal in CI, otherwise the signed-in user.
var principalType = isContinuousIntegration ? 'ServicePrincipal' : 'User'
9
15
10
16
@description ('Id of the user or app to assign application roles' )
@@ -100,6 +106,62 @@ module llamaIndexJavascript 'br/public:avm/res/app/container-app:0.8.0' = {
100
106
name : 'PORT'
101
107
value : '3000'
102
108
}
109
// Environment for the LlamaIndex app. Container App env `value` fields must be
// strings, so numeric config entries (dim, temperature, max tokens, top-k) are
// passed through string() — a no-op when the config already holds strings.
{
  name: 'AZURE_OPENAI_ENDPOINT'
  value: openAi.outputs.endpoint
}
{
  name: 'AZURE_DEPLOYMENT_NAME'
  value: llamaIndexConfig.chat.deployment
}
{
  name: 'AZURE_OPENAI_API_VERSION'
  value: llamaIndexConfig.openai_api_version
}
{
  name: 'MODEL_PROVIDER'
  value: llamaIndexConfig.model_provider
}
{
  name: 'MODEL'
  value: llamaIndexConfig.chat.model
}
{
  name: 'EMBEDDING_MODEL'
  value: llamaIndexConfig.embedding.model
}
{
  name: 'EMBEDDING_DIM'
  value: string(llamaIndexConfig.embedding.dim)
}
{
  name: 'LLM_TEMPERATURE'
  value: string(llamaIndexConfig.llm_temperature)
}
{
  name: 'LLM_MAX_TOKENS'
  value: string(llamaIndexConfig.llm_max_tokens)
}
{
  name: 'TOP_K'
  value: string(llamaIndexConfig.top_k)
}
{
  name: 'FILESERVER_URL_PREFIX'
  value: llamaIndexConfig.fileserver_url_prefix
}
{
  name: 'SYSTEM_PROMPT'
  value: llamaIndexConfig.system_prompt
}
{
  name: 'OPENAI_API_TYPE'
  value: 'AzureOpenAI'
}
{
  name: 'STORAGE_CACHE_DIR'
  value: './cache'
}
103
165
]
104
166
}
105
167
]
@@ -118,5 +180,58 @@ module llamaIndexJavascript 'br/public:avm/res/app/container-app:0.8.0' = {
118
180
tags : union (tags , { 'azd-service-name' : 'llama-index-javascript' })
119
181
}
120
182
}
183
+
184
// Azure OpenAI account hosting the chat and embedding model deployments used
// by the LlamaIndex app. Local (key) auth is disabled; access is granted via
// the Entra ID role assignments declared below.
module openAi 'br/public:avm/res/cognitive-services/account:0.10.2' = {
  name: 'openai'
  params: {
    name: '${abbrs.cognitiveServicesAccounts}${resourceToken}'
    location: location
    tags: tags
    kind: 'OpenAI'
    customSubDomainName: '${abbrs.cognitiveServicesAccounts}${resourceToken}'
    disableLocalAuth: true
    publicNetworkAccess: 'Enabled'
    deployments: [
      // Chat model deployment.
      {
        name: llamaIndexConfig.chat.deployment
        model: {
          format: 'OpenAI'
          name: llamaIndexConfig.chat.model
          version: llamaIndexConfig.chat.version
        }
        sku: {
          name: 'GlobalStandard'
          capacity: llamaIndexConfig.chat.capacity
        }
      }
      // Embedding model deployment.
      {
        name: llamaIndexConfig.embedding.deployment
        model: {
          format: 'OpenAI'
          name: llamaIndexConfig.embedding.model
          version: llamaIndexConfig.embedding.version
        }
        sku: {
          name: 'Standard'
          capacity: llamaIndexConfig.embedding.capacity
        }
      }
    ]
    roleAssignments: [
      // The deploying identity (user locally, service principal in CI).
      {
        principalId: principalId
        principalType: principalType
        roleDefinitionIdOrName: 'Cognitive Services OpenAI User'
      }
      // The container app's managed identity.
      {
        principalId: llamaIndexJavascriptIdentity.outputs.principalId
        principalType: 'ServicePrincipal'
        roleDefinitionIdOrName: 'Cognitive Services OpenAI User'
      }
    ]
  }
}
234
+
121
235
// Values surfaced to azd / callers after deployment.
output AZURE_CONTAINER_REGISTRY_ENDPOINT string = containerRegistry.outputs.loginServer
output AZURE_RESOURCE_LLAMA_INDEX_JAVASCRIPT_ID string = llamaIndexJavascript.outputs.resourceId
output AZURE_OPENAI_ENDPOINT string = openAi.outputs.endpoint
0 commit comments