Skip to content

Commit a18edb1

Browse files
committed
Update version number to 1.11.0 and update the documentation files to include Avro support.
1 parent f1813a7 commit a18edb1

File tree

24 files changed

+205
-24
lines changed

24 files changed

+205
-24
lines changed

.run/dqo run.run.xml

+1-1
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@
55
<option name="region" />
66
<option name="useCurrentConnection" value="false" />
77
</extension>
8-
<option name="JAR_PATH" value="$PROJECT_DIR$/dqops/target/dqo-dqops-1.10.2.jar" />
8+
<option name="JAR_PATH" value="$PROJECT_DIR$/dqops/target/dqo-dqops-1.11.0.jar" />
99
<option name="VM_PARAMETERS" value="-XX:MaxRAMPercentage=60.0 --add-opens java.base/java.nio=ALL-UNNAMED --add-opens java.base/java.util.concurrent=ALL-UNNAMED" />
1010
<option name="PROGRAM_PARAMETERS" value="--server.port=8888 --dqo.python.debug-mode=silent" />
1111
<option name="WORKING_DIRECTORY" value="$PROJECT_DIR$" />

VERSION

+1-1
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
1.10.2
1+
1.11.0

distribution/pom.xml

+1-1
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@
1111

1212
<groupId>com.dqops</groupId>
1313
<artifactId>dqo-distribution</artifactId>
14-
<version>1.10.2</version> <!-- DQOps Version, do not touch (changed automatically) -->
14+
<version>1.11.0</version> <!-- DQOps Version, do not touch (changed automatically) -->
1515
<name>dqo-distribution</name>
1616
<description>DQOps Data Quality Operations Center final assembly</description>
1717
<packaging>pom</packaging>

distribution/python/dqops/client/models/__init__.py

+2
Original file line numberDiff line numberDiff line change
@@ -60,6 +60,7 @@
6060
)
6161
from .authenticated_dashboard_model import AuthenticatedDashboardModel
6262
from .auto_import_tables_spec import AutoImportTablesSpec
63+
from .avro_file_format_spec import AvroFileFormatSpec
6364
from .aws_authentication_mode import AwsAuthenticationMode
6465
from .azure_authentication_mode import AzureAuthenticationMode
6566
from .between_floats_rule_parameters_spec import BetweenFloatsRuleParametersSpec
@@ -2154,6 +2155,7 @@
21542155
"AnomalyTimelinessDelayRuleWarning1PctParametersSpec",
21552156
"AuthenticatedDashboardModel",
21562157
"AutoImportTablesSpec",
2158+
"AvroFileFormatSpec",
21572159
"AwsAuthenticationMode",
21582160
"AzureAuthenticationMode",
21592161
"BetweenFloatsRuleParametersSpec",
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,58 @@
1+
from typing import Any, Dict, List, Type, TypeVar, Union
2+
3+
from attrs import define as _attrs_define
4+
from attrs import field as _attrs_field
5+
6+
from ..types import UNSET, Unset
7+
8+
T = TypeVar("T", bound="AvroFileFormatSpec")
9+
10+
11+
@_attrs_define
12+
class AvroFileFormatSpec:
13+
"""
14+
Attributes:
15+
filename (Union[Unset, bool]): Whether or not an extra filename column should be included in the result.
16+
"""
17+
18+
filename: Union[Unset, bool] = UNSET
19+
additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)
20+
21+
def to_dict(self) -> Dict[str, Any]:
22+
filename = self.filename
23+
24+
field_dict: Dict[str, Any] = {}
25+
field_dict.update(self.additional_properties)
26+
field_dict.update({})
27+
if filename is not UNSET:
28+
field_dict["filename"] = filename
29+
30+
return field_dict
31+
32+
@classmethod
33+
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
34+
d = src_dict.copy()
35+
filename = d.pop("filename", UNSET)
36+
37+
avro_file_format_spec = cls(
38+
filename=filename,
39+
)
40+
41+
avro_file_format_spec.additional_properties = d
42+
return avro_file_format_spec
43+
44+
@property
45+
def additional_keys(self) -> List[str]:
46+
return list(self.additional_properties.keys())
47+
48+
def __getitem__(self, key: str) -> Any:
49+
return self.additional_properties[key]
50+
51+
def __setitem__(self, key: str, value: Any) -> None:
52+
self.additional_properties[key] = value
53+
54+
def __delitem__(self, key: str) -> None:
55+
del self.additional_properties[key]
56+
57+
def __contains__(self, key: str) -> bool:
58+
return key in self.additional_properties

distribution/python/dqops/client/models/dqo_user_profile_model.py

+8
Original file line numberDiff line numberDiff line change
@@ -64,6 +64,7 @@ class DqoUserProfileModel:
6464
must be configured correctly and the user must have at least an EDITOR role.
6565
can_use_ai_anomaly_detection (Union[Unset, bool]): The DQOps instance is a paid version with advanced AI anomaly
6666
prediction.
67+
can_logout (Union[Unset, bool]): This instance uses federated authentication and the user can log out.
6768
"""
6869

6970
user: Union[Unset, str] = UNSET
@@ -98,6 +99,7 @@ class DqoUserProfileModel:
9899
can_use_data_domains: Union[Unset, bool] = UNSET
99100
can_synchronize_to_data_catalog: Union[Unset, bool] = UNSET
100101
can_use_ai_anomaly_detection: Union[Unset, bool] = UNSET
102+
can_logout: Union[Unset, bool] = UNSET
101103
additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)
102104

103105
def to_dict(self) -> Dict[str, Any]:
@@ -138,6 +140,7 @@ def to_dict(self) -> Dict[str, Any]:
138140
can_use_data_domains = self.can_use_data_domains
139141
can_synchronize_to_data_catalog = self.can_synchronize_to_data_catalog
140142
can_use_ai_anomaly_detection = self.can_use_ai_anomaly_detection
143+
can_logout = self.can_logout
141144

142145
field_dict: Dict[str, Any] = {}
143146
field_dict.update(self.additional_properties)
@@ -212,6 +215,8 @@ def to_dict(self) -> Dict[str, Any]:
212215
)
213216
if can_use_ai_anomaly_detection is not UNSET:
214217
field_dict["can_use_ai_anomaly_detection"] = can_use_ai_anomaly_detection
218+
if can_logout is not UNSET:
219+
field_dict["can_logout"] = can_logout
215220

216221
return field_dict
217222

@@ -293,6 +298,8 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
293298

294299
can_use_ai_anomaly_detection = d.pop("can_use_ai_anomaly_detection", UNSET)
295300

301+
can_logout = d.pop("can_logout", UNSET)
302+
296303
dqo_user_profile_model = cls(
297304
user=user,
298305
tenant=tenant,
@@ -326,6 +333,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
326333
can_use_data_domains=can_use_data_domains,
327334
can_synchronize_to_data_catalog=can_synchronize_to_data_catalog,
328335
can_use_ai_anomaly_detection=can_use_ai_anomaly_detection,
336+
can_logout=can_logout,
329337
)
330338

331339
dqo_user_profile_model.additional_properties = d

distribution/python/dqops/client/models/duckdb_files_format_type.py

+1
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@
22

33

44
class DuckdbFilesFormatType(str, Enum):
5+
AVRO = "avro"
56
CSV = "csv"
67
DELTA_LAKE = "delta_lake"
78
ICEBERG = "iceberg"

distribution/python/dqops/client/models/duckdb_parameters_spec.py

+18
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,7 @@
1111
from ..types import UNSET, Unset
1212

1313
if TYPE_CHECKING:
14+
from ..models.avro_file_format_spec import AvroFileFormatSpec
1415
from ..models.csv_file_format_spec import CsvFileFormatSpec
1516
from ..models.delta_lake_file_format_spec import DeltaLakeFileFormatSpec
1617
from ..models.duckdb_parameters_spec_directories import (
@@ -40,6 +41,7 @@ class DuckdbParametersSpec:
4041
csv (Union[Unset, CsvFileFormatSpec]):
4142
json (Union[Unset, JsonFileFormatSpec]):
4243
parquet (Union[Unset, ParquetFileFormatSpec]):
44+
avro (Union[Unset, AvroFileFormatSpec]):
4345
iceberg (Union[Unset, IcebergFileFormatSpec]):
4446
delta_lake (Union[Unset, DeltaLakeFileFormatSpec]):
4547
directories (Union[Unset, DuckdbParametersSpecDirectories]): Virtual schema name to directory mappings. The path
@@ -70,6 +72,7 @@ class DuckdbParametersSpec:
7072
csv: Union[Unset, "CsvFileFormatSpec"] = UNSET
7173
json: Union[Unset, "JsonFileFormatSpec"] = UNSET
7274
parquet: Union[Unset, "ParquetFileFormatSpec"] = UNSET
75+
avro: Union[Unset, "AvroFileFormatSpec"] = UNSET
7376
iceberg: Union[Unset, "IcebergFileFormatSpec"] = UNSET
7477
delta_lake: Union[Unset, "DeltaLakeFileFormatSpec"] = UNSET
7578
directories: Union[Unset, "DuckdbParametersSpecDirectories"] = UNSET
@@ -111,6 +114,10 @@ def to_dict(self) -> Dict[str, Any]:
111114
if not isinstance(self.parquet, Unset):
112115
parquet = self.parquet.to_dict()
113116

117+
avro: Union[Unset, Dict[str, Any]] = UNSET
118+
if not isinstance(self.avro, Unset):
119+
avro = self.avro.to_dict()
120+
114121
iceberg: Union[Unset, Dict[str, Any]] = UNSET
115122
if not isinstance(self.iceberg, Unset):
116123
iceberg = self.iceberg.to_dict()
@@ -160,6 +167,8 @@ def to_dict(self) -> Dict[str, Any]:
160167
field_dict["json"] = json
161168
if parquet is not UNSET:
162169
field_dict["parquet"] = parquet
170+
if avro is not UNSET:
171+
field_dict["avro"] = avro
163172
if iceberg is not UNSET:
164173
field_dict["iceberg"] = iceberg
165174
if delta_lake is not UNSET:
@@ -191,6 +200,7 @@ def to_dict(self) -> Dict[str, Any]:
191200

192201
@classmethod
193202
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
203+
from ..models.avro_file_format_spec import AvroFileFormatSpec
194204
from ..models.csv_file_format_spec import CsvFileFormatSpec
195205
from ..models.delta_lake_file_format_spec import DeltaLakeFileFormatSpec
196206
from ..models.duckdb_parameters_spec_directories import (
@@ -248,6 +258,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
248258
else:
249259
parquet = ParquetFileFormatSpec.from_dict(_parquet)
250260

261+
_avro = d.pop("avro", UNSET)
262+
avro: Union[Unset, AvroFileFormatSpec]
263+
if isinstance(_avro, Unset):
264+
avro = UNSET
265+
else:
266+
avro = AvroFileFormatSpec.from_dict(_avro)
267+
251268
_iceberg = d.pop("iceberg", UNSET)
252269
iceberg: Union[Unset, IcebergFileFormatSpec]
253270
if isinstance(_iceberg, Unset):
@@ -314,6 +331,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
314331
csv=csv,
315332
json=json,
316333
parquet=parquet,
334+
avro=avro,
317335
iceberg=iceberg,
318336
delta_lake=delta_lake,
319337
directories=directories,

distribution/python/dqops/client/models/file_format_spec.py

+18
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,7 @@
66
from ..types import UNSET, Unset
77

88
if TYPE_CHECKING:
9+
from ..models.avro_file_format_spec import AvroFileFormatSpec
910
from ..models.csv_file_format_spec import CsvFileFormatSpec
1011
from ..models.delta_lake_file_format_spec import DeltaLakeFileFormatSpec
1112
from ..models.iceberg_file_format_spec import IcebergFileFormatSpec
@@ -23,6 +24,7 @@ class FileFormatSpec:
2324
csv (Union[Unset, CsvFileFormatSpec]):
2425
json (Union[Unset, JsonFileFormatSpec]):
2526
parquet (Union[Unset, ParquetFileFormatSpec]):
27+
avro (Union[Unset, AvroFileFormatSpec]):
2628
iceberg (Union[Unset, IcebergFileFormatSpec]):
2729
delta_lake (Union[Unset, DeltaLakeFileFormatSpec]):
2830
file_paths (Union[Unset, List[str]]): The list of paths to files with data that are used as a source.
@@ -31,6 +33,7 @@ class FileFormatSpec:
3133
csv: Union[Unset, "CsvFileFormatSpec"] = UNSET
3234
json: Union[Unset, "JsonFileFormatSpec"] = UNSET
3335
parquet: Union[Unset, "ParquetFileFormatSpec"] = UNSET
36+
avro: Union[Unset, "AvroFileFormatSpec"] = UNSET
3437
iceberg: Union[Unset, "IcebergFileFormatSpec"] = UNSET
3538
delta_lake: Union[Unset, "DeltaLakeFileFormatSpec"] = UNSET
3639
file_paths: Union[Unset, List[str]] = UNSET
@@ -49,6 +52,10 @@ def to_dict(self) -> Dict[str, Any]:
4952
if not isinstance(self.parquet, Unset):
5053
parquet = self.parquet.to_dict()
5154

55+
avro: Union[Unset, Dict[str, Any]] = UNSET
56+
if not isinstance(self.avro, Unset):
57+
avro = self.avro.to_dict()
58+
5259
iceberg: Union[Unset, Dict[str, Any]] = UNSET
5360
if not isinstance(self.iceberg, Unset):
5461
iceberg = self.iceberg.to_dict()
@@ -70,6 +77,8 @@ def to_dict(self) -> Dict[str, Any]:
7077
field_dict["json"] = json
7178
if parquet is not UNSET:
7279
field_dict["parquet"] = parquet
80+
if avro is not UNSET:
81+
field_dict["avro"] = avro
7382
if iceberg is not UNSET:
7483
field_dict["iceberg"] = iceberg
7584
if delta_lake is not UNSET:
@@ -81,6 +90,7 @@ def to_dict(self) -> Dict[str, Any]:
8190

8291
@classmethod
8392
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
93+
from ..models.avro_file_format_spec import AvroFileFormatSpec
8494
from ..models.csv_file_format_spec import CsvFileFormatSpec
8595
from ..models.delta_lake_file_format_spec import DeltaLakeFileFormatSpec
8696
from ..models.iceberg_file_format_spec import IcebergFileFormatSpec
@@ -109,6 +119,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
109119
else:
110120
parquet = ParquetFileFormatSpec.from_dict(_parquet)
111121

122+
_avro = d.pop("avro", UNSET)
123+
avro: Union[Unset, AvroFileFormatSpec]
124+
if isinstance(_avro, Unset):
125+
avro = UNSET
126+
else:
127+
avro = AvroFileFormatSpec.from_dict(_avro)
128+
112129
_iceberg = d.pop("iceberg", UNSET)
113130
iceberg: Union[Unset, IcebergFileFormatSpec]
114131
if isinstance(_iceberg, Unset):
@@ -129,6 +146,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
129146
csv=csv,
130147
json=json,
131148
parquet=parquet,
149+
avro=avro,
132150
iceberg=iceberg,
133151
delta_lake=delta_lake,
134152
file_paths=file_paths,

distribution/python/dqops/version.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -15,8 +15,8 @@
1515
# limit
1616

1717
# WARNING: the next two lines with the version numbers (VERSION =, PIP_VERSION =) should not be modified manually. They are changed by a maven profile at compile time.
18-
VERSION = "1.10.2"
19-
PIP_VERSION = "1.10.2"
18+
VERSION = "1.11.0"
19+
PIP_VERSION = "1.11.0"
2020
GITHUB_RELEASE = "v" + VERSION + ""
2121
JAVA_VERSION = "17"
2222

docs/client/models/environment.md

+1
Original file line numberDiff line numberDiff line change
@@ -73,6 +73,7 @@ The model that describes the current user and his access rights.
7373
|<span class="no-wrap-code">`can_use_data_domains`</span>|User can use data domains. Support for data domains requires an ENTERPRISE license of DQOps.|*boolean*|
7474
|<span class="no-wrap-code">`can_synchronize_to_data_catalog`</span>|User can synchronize data to a data catalog. The instance must be configured correctly and the user must have at least an EDITOR role.|*boolean*|
7575
|<span class="no-wrap-code">`can_use_ai_anomaly_detection`</span>|The DQOps instance is a paid version with advanced AI anomaly prediction.|*boolean*|
76+
|<span class="no-wrap-code">`can_logout`</span>|This instance uses federated authentication and the user can log out.|*boolean*|
7677

7778

7879
___

docs/client/operations/environment.md

+10-5
Original file line numberDiff line numberDiff line change
@@ -256,7 +256,8 @@ http://localhost:8888/api/environment/profile
256256
"can_change_own_password" : false,
257257
"can_use_data_domains" : false,
258258
"can_synchronize_to_data_catalog" : false,
259-
"can_use_ai_anomaly_detection" : false
259+
"can_use_ai_anomaly_detection" : false,
260+
"can_logout" : false
260261
}
261262
```
262263

@@ -305,7 +306,8 @@ http://localhost:8888/api/environment/profile
305306
can_change_own_password=False,
306307
can_use_data_domains=False,
307308
can_synchronize_to_data_catalog=False,
308-
can_use_ai_anomaly_detection=False
309+
can_use_ai_anomaly_detection=False,
310+
can_logout=False
309311
)
310312
```
311313

@@ -355,7 +357,8 @@ http://localhost:8888/api/environment/profile
355357
can_change_own_password=False,
356358
can_use_data_domains=False,
357359
can_synchronize_to_data_catalog=False,
358-
can_use_ai_anomaly_detection=False
360+
can_use_ai_anomaly_detection=False,
361+
can_logout=False
359362
)
360363
```
361364

@@ -408,7 +411,8 @@ http://localhost:8888/api/environment/profile
408411
can_change_own_password=False,
409412
can_use_data_domains=False,
410413
can_synchronize_to_data_catalog=False,
411-
can_use_ai_anomaly_detection=False
414+
can_use_ai_anomaly_detection=False,
415+
can_logout=False
412416
)
413417
```
414418

@@ -461,7 +465,8 @@ http://localhost:8888/api/environment/profile
461465
can_change_own_password=False,
462466
can_use_data_domains=False,
463467
can_synchronize_to_data_catalog=False,
464-
can_use_ai_anomaly_detection=False
468+
can_use_ai_anomaly_detection=False,
469+
can_logout=False
465470
)
466471
```
467472

docs/command-line-interface/connection.md

+2-2
Original file line numberDiff line numberDiff line change
@@ -307,7 +307,7 @@ All parameters supported by the command are listed below.
307307
|<div id="connection add--duckdb-azure-tenant-id" class="no-wrap-code">`--duckdb-azure-tenant-id`</div>|Azure Tenant ID used by DuckDB Secret Manager. The value can be in the null format to use dynamic substitution.| ||
308308
|<div id="connection add--duckdb-database" class="no-wrap-code">`--duckdb-database`</div>|DuckDB database name for in-memory read mode. The value can be in the null format to use dynamic substitution.| ||
309309
|<div id="connection add--duckdb-directories" class="no-wrap-code">`--duckdb-directories`</div>|Virtual schema name to directory mappings. The path must be an absolute path.| ||
310-
|<div id="connection add--duckdb-files-format-type" class="no-wrap-code">`--duckdb-files-format-type`</div>|Type of source files format for DuckDB.| |*csv*<br/>*json*<br/>*parquet*<br/>*iceberg*<br/>*delta_lake*<br/>|
310+
|<div id="connection add--duckdb-files-format-type" class="no-wrap-code">`--duckdb-files-format-type`</div>|Type of source files format for DuckDB.| |*csv*<br/>*json*<br/>*parquet*<br/>*avro*<br/>*iceberg*<br/>*delta_lake*<br/>|
311311
|<div id="connection add--duckdb-password" class="no-wrap-code">`--duckdb-password`</div>|DuckDB password for a remote storage type. The value can be in the null format to use dynamic substitution.| ||
312312
|<div id="connection add--duckdb-read-mode" class="no-wrap-code">`--duckdb-read-mode`</div>|DuckDB read mode.| |*in_memory*<br/>*files*<br/>|
313313
|<div id="connection add--duckdb-region" class="no-wrap-code">`--duckdb-region`</div>|The region for the storage credentials. The value can be in the null format to use dynamic substitution.| ||
@@ -722,7 +722,7 @@ All parameters supported by the command are listed below.
722722
|<div id="connection update--duckdb-azure-tenant-id" class="no-wrap-code">`--duckdb-azure-tenant-id`</div>|Azure Tenant ID used by DuckDB Secret Manager. The value can be in the null format to use dynamic substitution.| ||
723723
|<div id="connection update--duckdb-database" class="no-wrap-code">`--duckdb-database`</div>|DuckDB database name for in-memory read mode. The value can be in the null format to use dynamic substitution.| ||
724724
|<div id="connection update--duckdb-directories" class="no-wrap-code">`--duckdb-directories`</div>|Virtual schema name to directory mappings. The path must be an absolute path.| ||
725-
|<div id="connection update--duckdb-files-format-type" class="no-wrap-code">`--duckdb-files-format-type`</div>|Type of source files format for DuckDB.| |*csv*<br/>*json*<br/>*parquet*<br/>*iceberg*<br/>*delta_lake*<br/>|
725+
|<div id="connection update--duckdb-files-format-type" class="no-wrap-code">`--duckdb-files-format-type`</div>|Type of source files format for DuckDB.| |*csv*<br/>*json*<br/>*parquet*<br/>*avro*<br/>*iceberg*<br/>*delta_lake*<br/>|
726726
|<div id="connection update--duckdb-password" class="no-wrap-code">`--duckdb-password`</div>|DuckDB password for a remote storage type. The value can be in the null format to use dynamic substitution.| ||
727727
|<div id="connection update--duckdb-read-mode" class="no-wrap-code">`--duckdb-read-mode`</div>|DuckDB read mode.| |*in_memory*<br/>*files*<br/>|
728728
|<div id="connection update--duckdb-region" class="no-wrap-code">`--duckdb-region`</div>|The region for the storage credentials. The value can be in the null format to use dynamic substitution.| ||

0 commit comments

Comments
 (0)